public async Task ImportObservations(
            [QueueTrigger(ImportsAvailableQueue)] ImportObservationsMessage message)
        {
            try
            {
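                // Hand the queued batch message to the file import service, using a fresh statistics DbContext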
                await _fileImportService.ImportObservations(message, DbUtils.CreateStatisticsDbContext());
            }
            catch (Exception e)
            {
                // A SQL deadlock (error 1205) is transient - rethrow so the queue message is retried (up to 3 times)
                if (e is SqlException exception && exception.Number == 1205)
                {
                    _logger.LogInformation($"{GetType().Name} : Handling known exception when processing Import: " +
                                           $"{message.Id} : {exception.Message} : transaction will be retried");
                    throw;
                }

                var ex = GetInnerException(e);

                // Any other exception is treated as unrecoverable: log the root cause and mark the import as failed
                _logger.LogError(ex, $"{GetType().Name} function FAILED for Import: " +
                                 $"{message.Id} : {ex.Message}");

                await _dataImportService.FailImport(message.Id);
            }
        }
Example #2
        public async Task CheckComplete_SingleDataFileCompleted_AlreadyFinished()
        {
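            // For each finished status, CheckComplete should take no further action - the strict mock only expects the status lookup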
            await FinishedStatuses.ForEachAsync(async finishedStatus =>
            {
                var message = new ImportObservationsMessage
                {
                    ReleaseId    = Guid.NewGuid(),
                    NumBatches   = 1,
                    DataFileName = "my_data_file.csv",
                    TotalRows    = 2,
                    SubjectId    = Guid.NewGuid()
                };

                var importStatusService = new Mock <IImportStatusService>(Strict);

                var service = BuildFileImportService(
                    importStatusService: importStatusService.Object);

                importStatusService
                .Setup(s => s.GetImportStatus(message.ReleaseId, message.DataFileName))
                .ReturnsAsync(new ImportStatus
                {
                    Status = finishedStatus
                });

                var dbContext = StatisticsDbUtils.InMemoryStatisticsDbContext();

                await using (dbContext)
                {
                    await service.CheckComplete(message.ReleaseId, message, dbContext);
                }

                MockUtils.VerifyAllMocks(importStatusService);
            });
        }
        public async Task ImportObservations(ImportObservationsMessage message, StatisticsDbContext context)
        {
            var import = await _dataImportService.GetImport(message.Id);

            _logger.LogInformation($"Importing Observations for {import.File.Filename} batchNo {message.BatchNo}");

            if (import.Status.IsFinished())
            {
                _logger.LogInformation($"Import for {import.File.Filename} already finished with state " +
                                       $"{import.Status} - ignoring Observations in file {message.ObservationsFilePath}");
                return;
            }

            if (import.Status == CANCELLING)
            {
                _logger.LogInformation($"Import for {import.File.Filename} is " +
                                       $"{import.Status} - ignoring Observations in file {message.ObservationsFilePath} " +
                                       "and marking import as CANCELLED");

                await _dataImportService.UpdateStatus(message.Id, CANCELLED, 100);

                return;
            }

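            // Load the subject being imported and stream both the batch data file and the subject's meta file from blob storage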
            var subject = await context.Subject.FindAsync(import.SubjectId);

            var datafileStream = await _blobStorageService.StreamBlob(PrivateReleaseFiles, message.ObservationsFilePath);

            var dataFileTable = DataTableUtils.CreateFromStream(datafileStream);

            var metaFileStream = await _blobStorageService.StreamBlob(PrivateReleaseFiles, import.MetaFile.Path());

            var metaFileTable = DataTableUtils.CreateFromStream(metaFileStream);

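            // Insert this batch's observations within a single transaction, run through the database's execution strategy so any configured retry policy covers the whole unit of work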
            await context.Database.CreateExecutionStrategy().Execute(async() =>
            {
                await using var transaction = await context.Database.BeginTransactionAsync();

                await _importerService.ImportObservations(
                    import,
                    dataFileTable.Columns,
                    dataFileTable.Rows,
                    subject,
                    _importerService.GetMeta(metaFileTable, subject, context),
                    message.BatchNo,
                    context
                    );

                await transaction.CommitAsync();
                await context.Database.CloseConnectionAsync();
            });

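            // Multi-batch imports delete each batch file once its observations are in; a single-file import keeps its original data file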
            if (import.NumBatches > 1)
            {
                await _blobStorageService.DeleteBlob(PrivateReleaseFiles, message.ObservationsFilePath);
            }

            await CheckComplete(message, context);
        }
Example #4
        public async Task CheckComplete_LastBatchFileCompleted_HasErrors()
        {
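            // The final batch completes but errors were recorded against the import, so it should be marked FAILED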
            var message = new ImportObservationsMessage
            {
                ReleaseId    = Guid.NewGuid(),
                NumBatches   = 2,
                DataFileName = "my_data_file.csv",
                TotalRows    = 2,
                SubjectId    = Guid.NewGuid()
            };

            var importStatusService = new Mock <IImportStatusService>(Strict);
            var fileStorageService  = new Mock <IFileStorageService>();

            var service = BuildFileImportService(
                importStatusService: importStatusService.Object,
                fileStorageService: fileStorageService.Object);

            importStatusService
            .Setup(s => s.GetImportStatus(message.ReleaseId, message.DataFileName))
            .ReturnsAsync(new ImportStatus
            {
                Status = STAGE_4,
                Errors = "an error"
            });

            fileStorageService
            .Setup(s => s.GetNumBatchesRemaining(message.ReleaseId, message.DataFileName))
            .ReturnsAsync(0);

            importStatusService
            .Setup(s => s.UpdateStatus(
                       message.ReleaseId, message.DataFileName, FAILED, 100))
            .Returns(Task.CompletedTask);

            var dbContext = StatisticsDbUtils.InMemoryStatisticsDbContext();

            await using (dbContext)
            {
                await dbContext.Observation.AddRangeAsync(
                    new Observation
                    {
                        SubjectId = message.SubjectId
                    },
                    new Observation
                    {
                        SubjectId = message.SubjectId
                    });

                await dbContext.SaveChangesAsync();

                await service.CheckComplete(message.ReleaseId, message, dbContext);
            }

            MockUtils.VerifyAllMocks(importStatusService, fileStorageService);
        }
Example #5
        public async Task CheckComplete_SingleDataFileCompleted_HasIncorrectObservationCount()
        {
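            // Only 2 observations end up in the database against an expected 3, so the import should be failed with a row-count mismatch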
            var message = new ImportObservationsMessage
            {
                ReleaseId    = Guid.NewGuid(),
                NumBatches   = 1,
                DataFileName = "my_data_file.csv",
                TotalRows    = 3,
                SubjectId    = Guid.NewGuid()
            };

            var importStatusService = new Mock <IImportStatusService>(Strict);
            var batchService        = new Mock <IBatchService>(Strict);

            var service = BuildFileImportService(
                importStatusService: importStatusService.Object,
                batchService: batchService.Object);

            importStatusService
            .Setup(s => s.GetImportStatus(message.ReleaseId, message.DataFileName))
            .ReturnsAsync(new ImportStatus
            {
                Status = STAGE_4,
            });

            batchService
            .Setup(s => s.FailImport(message.ReleaseId, message.DataFileName, new List <ValidationError>
            {
                new ValidationError(
                    $"Number of observations inserted (2) " +
                    $"does not equal that expected ({message.TotalRows}) : Please delete & retry"
                    )
            }))
            .Returns(Task.CompletedTask);

            var dbContext = StatisticsDbUtils.InMemoryStatisticsDbContext();

            await using (dbContext)
            {
                await dbContext.Observation.AddRangeAsync(
                    new Observation
                    {
                        SubjectId = message.SubjectId
                    },
                    new Observation
                    {
                        SubjectId = message.SubjectId
                    });

                await dbContext.SaveChangesAsync();

                await service.CheckComplete(message.ReleaseId, message, dbContext);
            }

            MockUtils.VerifyAllMocks(importStatusService, batchService);
        }
        public async Task CheckComplete(ImportObservationsMessage message, StatisticsDbContext context)
        {
            var import = await _dataImportService.GetImport(message.Id);

            if (import.Status.IsFinished())
            {
                _logger.LogInformation($"Import for {import.File.Filename} is already finished in " +
                                       $"state {import.Status} - not attempting to mark as completed or failed");
                return;
            }

            if (import.Status.IsAborting())
            {
                _logger.LogInformation($"Import for {import.File.Filename} is trying to abort in " +
                                       $"state {import.Status} - not attempting to mark as completed or failed, but " +
                                       $"instead marking as {import.Status.GetFinishingStateOfAbortProcess()}, the final " +
                                       $"state of the aborting process");

                await _dataImportService.UpdateStatus(message.Id, import.Status.GetFinishingStateOfAbortProcess(), 100);

                return;
            }

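            // Every batch has been processed - verify the number of observations actually inserted before marking the import complete or failed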
            if (import.NumBatches == 1 || await _batchService.GetNumBatchesRemaining(import.File) == 0)
            {
                var observationCount = context.Observation.Count(o => o.SubjectId.Equals(import.SubjectId));

                if (!observationCount.Equals(import.TotalRows))
                {
                    await _dataImportService.FailImport(message.Id,
                                                        $"Number of observations inserted ({observationCount}) " +
                                                        $"does not equal that expected ({import.TotalRows}) : Please delete & retry");
                }
                else
                {
                    if (import.Errors.Count == 0)
                    {
                        await _dataImportService.UpdateStatus(message.Id, COMPLETE, 100);
                    }
                    else
                    {
                        await _dataImportService.UpdateStatus(message.Id, FAILED, 100);
                    }
                }
            }
            else
            {
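                // Batches are still outstanding - report progress as the proportion of batches processed so far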
                var numBatchesRemaining = await _batchService.GetNumBatchesRemaining(import.File);

                var percentageComplete = (double)(import.NumBatches - numBatchesRemaining) / import.NumBatches * 100;

                await _dataImportService.UpdateStatus(message.Id, STAGE_4, percentageComplete);
            }
        }
        public async Task CheckComplete_BatchedFilesStillProcessing()
        {
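            // One of the two batches is still outstanding, so the import should remain in STAGE_4 at 50% complete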
            var file = new File
            {
                Id       = Guid.NewGuid(),
                Filename = "my_data_file.csv"
            };

            var import = new DataImport
            {
                Id         = Guid.NewGuid(),
                Errors     = new List <DataImportError>(),
                FileId     = file.Id,
                File       = file,
                SubjectId  = Guid.NewGuid(),
                Status     = STAGE_4,
                NumBatches = 2,
                TotalRows  = 2
            };

            var batchService      = new Mock <IBatchService>(Strict);
            var dataImportService = new Mock <IDataImportService>(Strict);

            batchService
            .Setup(s => s.GetNumBatchesRemaining(import.File))
            .ReturnsAsync(1);

            dataImportService
            .Setup(s => s.GetImport(import.Id))
            .ReturnsAsync(import);

            dataImportService
            .Setup(s => s.UpdateStatus(
                       import.Id, STAGE_4, 50))
            .Returns(Task.CompletedTask);

            var statisticsDbContextId = Guid.NewGuid().ToString();

            await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
            {
                var service = BuildFileImportService(batchService: batchService.Object,
                                                     dataImportService: dataImportService.Object);

                var message = new ImportObservationsMessage
                {
                    Id = import.Id
                };

                await service.CheckComplete(message, statisticsDbContext);
            }

            MockUtils.VerifyAllMocks(batchService, dataImportService);
        }
        public async Task CheckComplete_LastBatchFileCompleted_Aborting()
        {
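            // Each aborting status should be resolved straight to the finishing state of its abort process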
            await AbortingStatuses
            .ToAsyncEnumerable()
            .ForEachAwaitAsync(async abortingStatus =>
            {
                var file = new File
                {
                    Id       = Guid.NewGuid(),
                    Filename = "my_data_file.csv"
                };

                var import = new DataImport
                {
                    Id         = Guid.NewGuid(),
                    Errors     = new List <DataImportError>(),
                    FileId     = file.Id,
                    File       = file,
                    SubjectId  = Guid.NewGuid(),
                    Status     = abortingStatus,
                    NumBatches = 2,
                    TotalRows  = 2
                };

                var dataImportService = new Mock <IDataImportService>(Strict);

                dataImportService
                .Setup(s => s.GetImport(import.Id))
                .ReturnsAsync(import);

                dataImportService
                .Setup(s => s.UpdateStatus(
                           import.Id, abortingStatus.GetFinishingStateOfAbortProcess(), 100))
                .Returns(Task.CompletedTask);

                var statisticsDbContextId = Guid.NewGuid().ToString();

                await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
                {
                    var service = BuildFileImportService(dataImportService: dataImportService.Object);

                    var message = new ImportObservationsMessage
                    {
                        Id = import.Id
                    };

                    await service.CheckComplete(message, statisticsDbContext);
                }

                MockUtils.VerifyAllMocks(dataImportService);
            });
        }
Example #9
        public async Task CheckComplete_LastBatchFileCompleted_Aborting()
        {
            await AbortingStatuses.ForEachAsync(async abortingStatus =>
            {
                var message = new ImportObservationsMessage
                {
                    ReleaseId    = Guid.NewGuid(),
                    NumBatches   = 2,
                    DataFileName = "my_data_file.csv",
                    TotalRows    = 2,
                    SubjectId    = Guid.NewGuid()
                };

                var importStatusService = new Mock <IImportStatusService>(Strict);
                var fileStorageService  = new Mock <IFileStorageService>();

                var service = BuildFileImportService(
                    importStatusService: importStatusService.Object,
                    fileStorageService: fileStorageService.Object);

                var currentStatus = new ImportStatus
                {
                    Status = abortingStatus
                };

                importStatusService
                .Setup(s => s.GetImportStatus(message.ReleaseId, message.DataFileName))
                .ReturnsAsync(currentStatus);

                importStatusService
                .Setup(s => s.UpdateStatus(
                           message.ReleaseId, message.DataFileName, currentStatus.GetFinishingStateOfAbortProcess(), 100))
                .Returns(Task.CompletedTask);

                var dbContext = StatisticsDbUtils.InMemoryStatisticsDbContext();

                await using (dbContext)
                {
                    await service.CheckComplete(message.ReleaseId, message, dbContext);
                }

                MockUtils.VerifyAllMocks(importStatusService, fileStorageService);
            });
        }
Example #10
        public async Task CheckComplete_BatchedFilesStillProcessing()
        {
            var message = new ImportObservationsMessage
            {
                ReleaseId    = Guid.NewGuid(),
                NumBatches   = 2,
                DataFileName = "my_data_file.csv",
                TotalRows    = 2,
                SubjectId    = Guid.NewGuid(),
                BatchNo      = 1
            };

            var importStatusService = new Mock <IImportStatusService>(Strict);
            var fileStorageService  = new Mock <IFileStorageService>();

            var service = BuildFileImportService(
                importStatusService: importStatusService.Object,
                fileStorageService: fileStorageService.Object);

            importStatusService
            .Setup(s => s.GetImportStatus(message.ReleaseId, message.DataFileName))
            .ReturnsAsync(new ImportStatus
            {
                Status = STAGE_4
            });

            fileStorageService
            .Setup(s => s.GetNumBatchesRemaining(message.ReleaseId, message.DataFileName))
            .ReturnsAsync(1);

            importStatusService
            .Setup(s => s.UpdateStatus(
                       message.ReleaseId, message.DataFileName, STAGE_4, 50))
            .Returns(Task.CompletedTask);

            var dbContext = StatisticsDbUtils.InMemoryStatisticsDbContext();

            await using (dbContext)
            {
                await service.CheckComplete(message.ReleaseId, message, dbContext);
            }

            MockUtils.VerifyAllMocks(importStatusService, fileStorageService);
        }
        public async Task CheckComplete_LastBatchFileCompleted_HasIncorrectObservationCount()
        {
            var file = new File
            {
                Id       = Guid.NewGuid(),
                Filename = "my_data_file.csv"
            };

            var import = new DataImport
            {
                Id         = Guid.NewGuid(),
                Errors     = new List <DataImportError>(),
                FileId     = file.Id,
                File       = file,
                SubjectId  = Guid.NewGuid(),
                Status     = STAGE_4,
                NumBatches = 2,
                TotalRows  = 3
            };

            var batchService      = new Mock <IBatchService>(Strict);
            var dataImportService = new Mock <IDataImportService>(Strict);

            batchService
            .Setup(s => s.GetNumBatchesRemaining(import.File))
            .ReturnsAsync(0);

            dataImportService
            .Setup(s => s.GetImport(import.Id))
            .ReturnsAsync(import);

            dataImportService
            .Setup(s => s.FailImport(import.Id,
                                     $"Number of observations inserted (2) does not equal that expected ({import.TotalRows}) : Please delete & retry"))
            .Returns(Task.CompletedTask);

            var statisticsDbContextId = Guid.NewGuid().ToString();

            await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
            {
                await statisticsDbContext.Observation.AddRangeAsync(
                    new Observation
                    {
                        SubjectId = import.SubjectId
                    },
                    new Observation
                    {
                        SubjectId = import.SubjectId
                    });

                await statisticsDbContext.SaveChangesAsync();
            }

            await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
            {
                var service = BuildFileImportService(batchService: batchService.Object,
                                                     dataImportService: dataImportService.Object);

                var message = new ImportObservationsMessage
                {
                    Id = import.Id
                };

                await service.CheckComplete(message, statisticsDbContext);
            }

            MockUtils.VerifyAllMocks(batchService, dataImportService);
        }
        public async Task CheckComplete_LastBatchFileCompleted_HasErrors()
        {
            var file = new File
            {
                Id       = Guid.NewGuid(),
                Filename = "my_data_file.csv"
            };

            var import = new DataImport
            {
                Id     = Guid.NewGuid(),
                Errors = new List <DataImportError>
                {
                    new DataImportError("an error")
                },
                FileId     = file.Id,
                File       = file,
                SubjectId  = Guid.NewGuid(),
                Status     = STAGE_4,
                NumBatches = 2,
                TotalRows  = 2
            };

            var batchService      = new Mock <IBatchService>(Strict);
            var dataImportService = new Mock <IDataImportService>(Strict);

            batchService
            .Setup(s => s.GetNumBatchesRemaining(import.File))
            .ReturnsAsync(0);

            dataImportService
            .Setup(s => s.GetImport(import.Id))
            .ReturnsAsync(import);

            dataImportService
            .Setup(s => s.UpdateStatus(
                       import.Id, FAILED, 100))
            .Returns(Task.CompletedTask);

            var statisticsDbContextId = Guid.NewGuid().ToString();

            await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
            {
                await statisticsDbContext.Observation.AddRangeAsync(
                    new Observation
                    {
                        SubjectId = import.SubjectId
                    },
                    new Observation
                    {
                        SubjectId = import.SubjectId
                    });

                await statisticsDbContext.SaveChangesAsync();
            }

            await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
            {
                var service = BuildFileImportService(batchService: batchService.Object,
                                                     dataImportService: dataImportService.Object);

                var message = new ImportObservationsMessage
                {
                    Id = import.Id
                };

                await service.CheckComplete(message, statisticsDbContext);
            }

            MockUtils.VerifyAllMocks(batchService, dataImportService);
        }
        public async Task ImportObservations(ImportObservationsMessage message, StatisticsDbContext context)
        {
            var releaseId = message.ReleaseId;

            var status = await _importStatusService.GetImportStatus(releaseId, message.DataFileName);

            if (status.IsFinished())
            {
                _logger.LogInformation($"Import for {message.DataFileName} already finished with state " +
                                       $"{status.Status} - ignoring Observations in file {message.ObservationsFilePath}");
                return;
            }

            if (status.Status == CANCELLING)
            {
                _logger.LogInformation($"Import for {message.DataFileName} is CANCELLING " +
                                       $"{status.Status} - ignoring Observations in file {message.ObservationsFilePath} " +
                                       $"and marking import as CANCELLED");

                await _importStatusService.UpdateStatus(releaseId, message.DataFileName, CANCELLED, 100);

                return;
            }

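            // Fetch the data and meta blobs for this subject and load them into in-memory data tables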
            var subjectData = await _fileStorageService.GetSubjectData(message.ReleaseId, message.ObservationsFilePath);

            var releaseSubject = GetReleaseSubjectLink(message.ReleaseId, message.SubjectId, context);

            await using var datafileStream = await _fileStorageService.StreamBlob(subjectData.DataBlob);

            var dataFileTable = DataTableUtils.CreateFromStream(datafileStream);

            await using var metaFileStream = await _fileStorageService.StreamBlob(subjectData.MetaBlob);

            var metaFileTable = DataTableUtils.CreateFromStream(metaFileStream);

            await context.Database.CreateExecutionStrategy().Execute(async() =>
            {
                await using var transaction = await context.Database.BeginTransactionAsync();

                await _importerService.ImportObservations(
                    dataFileTable.Columns,
                    dataFileTable.Rows,
                    releaseSubject.Subject,
                    _importerService.GetMeta(metaFileTable, releaseSubject.Subject, context),
                    message.BatchNo,
                    message.RowsPerBatch,
                    context
                    );

                await transaction.CommitAsync();
                await context.Database.CloseConnectionAsync();
            });

            if (message.NumBatches > 1)
            {
                await _fileStorageService.DeleteBlobByPath(message.ObservationsFilePath);
            }

            await CheckComplete(releaseId, message, context);
        }
        public async Task CheckComplete(Guid releaseId, ImportObservationsMessage message, StatisticsDbContext context)
        {
            var import = await _importStatusService.GetImportStatus(releaseId, message.DataFileName);

            if (import.IsFinished())
            {
                _logger.LogInformation($"Import for {message.DataFileName} is already finished in " +
                                       $"state {import.Status} - not attempting to mark as completed or failed");
                return;
            }

            if (import.IsAborting())
            {
                _logger.LogInformation($"Import for {message.DataFileName} is trying to abort in " +
                                       $"state {import.Status} - not attempting to mark as completed or failed, but " +
                                       $"instead marking as {import.GetFinishingStateOfAbortProcess()}, the final " +
                                       $"state of the aborting process");

                await _importStatusService.UpdateStatus(releaseId,
                                                        message.DataFileName,
                                                        import.GetFinishingStateOfAbortProcess(),
                                                        100);

                return;
            }

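            // With no batches left, compare the inserted observation count against the expected total for the subject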
            if (message.NumBatches == 1 ||
                await _fileStorageService.GetNumBatchesRemaining(releaseId, message.DataFileName) == 0)
            {
                var observationCount = context.Observation.Count(o => o.SubjectId.Equals(message.SubjectId));

                if (!observationCount.Equals(message.TotalRows))
                {
                    await _batchService.FailImport(releaseId, message.DataFileName,
                                                   new List <ValidationError>
                    {
                        new ValidationError(
                            $"Number of observations inserted ({observationCount}) " +
                            $"does not equal that expected ({message.TotalRows}) : Please delete & retry"
                            )
                    }.AsEnumerable());
                }
                else
                {
                    if (import.Errors.IsNullOrEmpty())
                    {
                        await _importStatusService.UpdateStatus(releaseId, message.DataFileName, COMPLETE, 100);
                    }
                    else
                    {
                        await _importStatusService.UpdateStatus(releaseId, message.DataFileName, FAILED, 100);
                    }
                }
            }
            else
            {
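                // Otherwise report percentage progress based on the number of batches remaining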
                var numBatchesRemaining =
                    await _fileStorageService.GetNumBatchesRemaining(releaseId, message.DataFileName);

                var percentageComplete = (double)(message.NumBatches - numBatchesRemaining) / message.NumBatches * 100;

                await _importStatusService.UpdateStatus(releaseId,
                                                        message.DataFileName,
                                                        STAGE_4,
                                                        percentageComplete);
            }
        }