public async Task GivenImportTaskInput_WhenExceptionThrowForCleanData_ThenRetriableExceptionShouldBeThrow()
{
    // Arrange: a task whose progress requires the clean-data step to run first.
    ImportProcessingTaskInputData inputData = GetInputData();
    ImportProcessingProgress progress = new ImportProcessingProgress();
    IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
    IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
    IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
    IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
    RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
    ILoggerFactory loggerFactory = new NullLoggerFactory();

    // Make the clean-data step fail with a generic (non-cancellation) exception.
    importer.CleanResourceAsync(Arg.Any<ImportProcessingTaskInputData>(), Arg.Any<ImportProcessingProgress>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            throw new InvalidOperationException();
        });

    progress.NeedCleanData = true;
    ImportProcessingTask task = new ImportProcessingTask(
        inputData,
        progress,
        loader,
        importer,
        importErrorStoreFactory,
        contextUpdater,
        contextAccessor,
        loggerFactory);

    // The failure during cleanup should surface as a retriable task exception.
    await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());
}
public async Task GivenImportTaskInput_WhenStartFromClean_ThenAllResoruceShouldBeImported()
{
    // A fresh progress object means the task starts at index 0 with no prior counts.
    ImportProcessingProgress initialProgress = new ImportProcessingProgress();

    await VerifyCommonImportTaskAsync(GetInputData(), initialProgress);
}
public async Task GivenImportTaskInput_WhenOperationWasCancelledExceptionThrow_ThenTaskShouldBeCanceled()
{
    // Arrange task dependencies as substitutes.
    ImportProcessingTaskInputData inputData = GetInputData();
    ImportProcessingProgress progress = new ImportProcessingProgress();
    IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
    IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
    IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
    IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
    RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
    ILoggerFactory loggerFactory = new NullLoggerFactory();

    // The importer reports cancellation instead of a generic failure.
    importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            throw new OperationCanceledException();
        });

    ImportProcessingTask task = new ImportProcessingTask(
        inputData,
        progress,
        loader,
        importer,
        importErrorStoreFactory,
        contextUpdater,
        contextAccessor,
        loggerFactory);

    TaskResultData result = await task.ExecuteAsync();

    // Cancellation is reported as a Canceled result rather than a thrown exception.
    Assert.Equal(TaskResult.Canceled, result.Result);
}
public async Task GivenImportTaskInput_WhenStartFromMiddle_ThenAllResoruceShouldBeImported()
{
    // Simulate a resumed task: some resources were already handled before restart.
    ImportProcessingProgress resumedProgress = new ImportProcessingProgress
    {
        SucceedImportCount = 3,
        FailedImportCount = 1,
        CurrentIndex = 4,
    };

    await VerifyCommonImportTaskAsync(GetInputData(), resumedProgress);
}
public async Task GivenImportTaskInput_WhenExceptionThrowForLoad_ThenRetriableExceptionShouldBeThrow()
{
    // Arrange task dependencies as substitutes.
    ImportProcessingTaskInputData inputData = GetInputData();
    ImportProcessingProgress progress = new ImportProcessingProgress();
    IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
    IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
    IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
    IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
    RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
    ILoggerFactory loggerFactory = new NullLoggerFactory();

    // The loader's background task fails; the channel is still completed so the
    // consumer can drain it and observe the load task's exception.
    loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();
            Task loadTask = Task.Run(() =>
            {
                try
                {
                    throw new InvalidOperationException();
                }
                finally
                {
                    resourceChannel.Writer.Complete();
                }
            });

            return (resourceChannel, loadTask);
        });

    ImportProcessingTask task = new ImportProcessingTask(
        inputData,
        progress,
        loader,
        importer,
        importErrorStoreFactory,
        contextUpdater,
        contextAccessor,
        loggerFactory);

    // The load failure should surface as a retriable task exception.
    await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());
}
private async Task<Task<ImportProcessingProgress>> EnqueueTaskAsync(Queue<Task<ImportProcessingProgress>> importTasks, Func<Task<ImportProcessingProgress>> newTaskFactory, Channel<ImportProcessingProgress> progressChannel)
{
    // Throttle: drain the oldest tasks until we are under the concurrency cap,
    // forwarding any progress they produced to the output channel.
    while (importTasks.Count >= _importTaskConfiguration.SqlMaxImportOperationConcurrentCount)
    {
        ImportProcessingProgress completedProgress = await importTasks.Dequeue();
        if (completedProgress != null)
        {
            await progressChannel.Writer.WriteAsync(completedProgress);
        }
    }

    // Start the new work and track it in the queue.
    Task<ImportProcessingProgress> enqueuedTask = newTaskFactory();
    importTasks.Enqueue(enqueuedTask);

    return enqueuedTask;
}
public ITask Create(TaskInfo taskInfo)
{
    EnsureArg.IsNotNull(taskInfo, nameof(taskInfo));

    // A non-empty context means the task is resuming from a checkpoint.
    bool hasContext = !string.IsNullOrEmpty(taskInfo.Context);

    if (taskInfo.TaskTypeId == ImportProcessingTask.ImportProcessingTaskId)
    {
        IContextUpdater updater = _contextUpdaterFactory.CreateContextUpdater(taskInfo.TaskId, taskInfo.RunId);
        ImportProcessingTaskInputData processingInput = JsonConvert.DeserializeObject<ImportProcessingTaskInputData>(taskInfo.InputData);
        ImportProcessingProgress processingProgress = hasContext
            ? JsonConvert.DeserializeObject<ImportProcessingProgress>(taskInfo.Context)
            : new ImportProcessingProgress();

        return new ImportProcessingTask(
            processingInput,
            processingProgress,
            _importResourceLoader,
            _resourceBulkImporter,
            _importErrorStoreFactory,
            updater,
            _contextAccessor,
            _loggerFactory);
    }

    if (taskInfo.TaskTypeId == ImportOrchestratorTask.ImportOrchestratorTaskId)
    {
        IContextUpdater updater = _contextUpdaterFactory.CreateContextUpdater(taskInfo.TaskId, taskInfo.RunId);
        ImportOrchestratorTaskInputData orchestratorInput = JsonConvert.DeserializeObject<ImportOrchestratorTaskInputData>(taskInfo.InputData);
        ImportOrchestratorTaskContext orchestratorContext = hasContext
            ? JsonConvert.DeserializeObject<ImportOrchestratorTaskContext>(taskInfo.Context)
            : new ImportOrchestratorTaskContext();

        return new ImportOrchestratorTask(
            _mediator,
            orchestratorInput,
            orchestratorContext,
            _taskmanager,
            _sequenceIdGenerator,
            updater,
            _contextAccessor,
            _importOrchestratorTaskDataStoreOperation,
            _integrationDataStoreClient,
            _loggerFactory);
    }

    // Unknown task type: let the caller decide how to handle it.
    return null;
}
private async Task<ImportProcessingProgress> UploadImportErrorsAsync(IImportErrorStore importErrorStore, long succeedCount, long failedCount, string[] importErrors, long lastIndex, CancellationToken cancellationToken)
{
    try
    {
        await importErrorStore.UploadErrorsAsync(importErrors, cancellationToken);
    }
    catch (Exception ex)
    {
        // NOTE(review): failure is logged at Information level — consider LogError; confirm intent.
        _logger.LogInformation(ex, "Failed to upload error logs.");
        throw;
    }

    // Return progress for checkpoint progress: the next run resumes right after
    // the last index handled here.
    return new ImportProcessingProgress
    {
        SucceedImportCount = succeedCount,
        FailedImportCount = failedCount,
        CurrentIndex = lastIndex + 1,
    };
}
public async Task GivenImportTaskInput_WhenExceptionThrowForImport_ThenContextShouldBeUpdatedBeforeFailure()
{
    long currentIndex = 100;

    // Arrange task dependencies as substitutes.
    ImportProcessingTaskInputData inputData = GetInputData();
    ImportProcessingProgress progress = new ImportProcessingProgress();
    IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
    IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
    IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
    IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
    RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
    ILoggerFactory loggerFactory = new NullLoggerFactory();

    // Loader returns an already-completed, empty resource channel.
    loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();
            resourceChannel.Writer.Complete();

            return (resourceChannel, Task.CompletedTask);
        });

    // Importer publishes one progress entry, then fails.
    // (Renamed from "progress" to avoid shadowing the enclosing local.)
    importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            Channel<ImportProcessingProgress> progressChannel = Channel.CreateUnbounded<ImportProcessingProgress>();
            Task importTask = Task.Run(async () =>
            {
                try
                {
                    ImportProcessingProgress reportedProgress = new ImportProcessingProgress();
                    reportedProgress.CurrentIndex = currentIndex;
                    await progressChannel.Writer.WriteAsync(reportedProgress);
                    throw new InvalidOperationException();
                }
                finally
                {
                    progressChannel.Writer.Complete();
                }
            });

            return (progressChannel, importTask);
        });

    // Capture the serialized context written before the failure surfaces.
    string context = null;
    contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            context = (string)callInfo[0];
            return Task.CompletedTask;
        });

    ImportProcessingTask task = new ImportProcessingTask(
        inputData,
        progress,
        loader,
        importer,
        importErrorStoreFactory,
        contextUpdater,
        contextAccessor,
        loggerFactory);

    await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());

    ImportProcessingProgress progressForContext = JsonConvert.DeserializeObject<ImportProcessingProgress>(context);

    // xUnit convention: expected value first.
    Assert.Equal(currentIndex, progressForContext.CurrentIndex);
}
private static async Task VerifyCommonImportTaskAsync(ImportProcessingTaskInputData inputData, ImportProcessingProgress progress)
{
    // Snapshot the incoming progress so assertions can compare against it later.
    long startIndexFromProgress = progress.CurrentIndex;
    long succeedCountFromProgress = progress.SucceedImportCount;
    long failedCountFromProgress = progress.FailedImportCount;

    IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
    IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
    IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
    IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
    RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
    ILoggerFactory loggerFactory = new NullLoggerFactory();

    // Record the sequence-id range passed to the clean-data step for verification.
    // (Lambda locals renamed to avoid shadowing the method parameters.)
    long cleanStart = -1;
    long cleanEnd = -1;
    importer.CleanResourceAsync(Arg.Any<ImportProcessingTaskInputData>(), Arg.Any<ImportProcessingProgress>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            var capturedInput = (ImportProcessingTaskInputData)callInfo[0];
            var capturedProgress = (ImportProcessingProgress)callInfo[1];
            cleanStart = capturedInput.BeginSequenceId + capturedProgress.CurrentIndex;
            cleanEnd = capturedInput.EndSequenceId;

            return Task.CompletedTask;
        });

    // Loader emits one valid resource followed by one resource carrying an import error.
    loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            long startIndex = (long)callInfo[1];
            Func<long, long> idGenerator = (Func<long, long>)callInfo[3];
            Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();
            Task loadTask = Task.Run(async () =>
            {
                ResourceWrapper resourceWrapper = new ResourceWrapper(
                    Guid.NewGuid().ToString(),
                    "0",
                    "Dummy",
                    new RawResource(Guid.NewGuid().ToString(), Fhir.Core.Models.FhirResourceFormat.Json, true),
                    new ResourceRequest("POST"),
                    DateTimeOffset.UtcNow,
                    false,
                    null,
                    null,
                    null,
                    "SearchParam");

                await resourceChannel.Writer.WriteAsync(new ImportResource(idGenerator(startIndex), startIndex, resourceWrapper));
                await resourceChannel.Writer.WriteAsync(new ImportResource(idGenerator(startIndex + 1), startIndex + 1, "Error"));
                resourceChannel.Writer.Complete();
            });

            return (resourceChannel, loadTask);
        });

    // Importer counts successes/failures and reports a single progress entry.
    // (Renamed from "progress" to avoid shadowing the method parameter.)
    importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            Channel<ImportResource> resourceChannel = (Channel<ImportResource>)callInfo[0];
            Channel<ImportProcessingProgress> progressChannel = Channel.CreateUnbounded<ImportProcessingProgress>();
            Task importTask = Task.Run(async () =>
            {
                ImportProcessingProgress batchProgress = new ImportProcessingProgress();
                await foreach (ImportResource resource in resourceChannel.Reader.ReadAllAsync())
                {
                    if (string.IsNullOrEmpty(resource.ImportError))
                    {
                        batchProgress.SucceedImportCount++;
                    }
                    else
                    {
                        batchProgress.FailedImportCount++;
                    }

                    batchProgress.CurrentIndex = resource.Index + 1;
                }

                await progressChannel.Writer.WriteAsync(batchProgress);
                progressChannel.Writer.Complete();
            });

            return (progressChannel, importTask);
        });

    // Capture the serialized checkpoint context.
    string context = null;
    contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
        .Returns(callInfo =>
        {
            context = (string)callInfo[0];
            return Task.CompletedTask;
        });

    progress.NeedCleanData = true;
    ImportProcessingTask task = new ImportProcessingTask(
        inputData,
        progress,
        loader,
        importer,
        importErrorStoreFactory,
        contextUpdater,
        contextAccessor,
        loggerFactory);

    TaskResultData taskResult = await task.ExecuteAsync();
    Assert.Equal(TaskResult.Success, taskResult.Result);

    // One success and one failure are added on top of the incoming progress.
    ImportProcessingTaskResult result = JsonConvert.DeserializeObject<ImportProcessingTaskResult>(taskResult.ResultData);
    Assert.Equal(1 + failedCountFromProgress, result.FailedCount);
    Assert.Equal(1 + succeedCountFromProgress, result.SucceedCount);

    ImportProcessingProgress progressForContext = JsonConvert.DeserializeObject<ImportProcessingProgress>(context);

    // xUnit convention: expected value first.
    Assert.Equal(result.SucceedCount, progressForContext.SucceedImportCount);
    Assert.Equal(result.FailedCount, progressForContext.FailedImportCount);
    Assert.Equal(startIndexFromProgress + 2, progressForContext.CurrentIndex);
    Assert.Equal(startIndexFromProgress, cleanStart);
    Assert.Equal(inputData.EndSequenceId, cleanEnd);
}
private static async Task VerifyBulkImporterBehaviourAsync(Channel<ImportResource> inputs, long expectedSucceedCount, long expectedFailedCount, long expectedEndIndex, int maxResourceCountInBatch, int checkpointBatchCount, int maxConcurrentCount)
{
    // Accumulators observed through the mocked SQL operations.
    DataTable table1 = new DataTable();
    DataTable table2 = new DataTable();
    DataTable dupTable = new DataTable();
    List<SqlBulkCopyDataWrapper> importedResources = new List<SqlBulkCopyDataWrapper>();

    // Route bulk-copied tables into the matching accumulator by table name.
    ISqlImportOperation testFhirDataBulkOperation = Substitute.For<ISqlImportOperation>();
    testFhirDataBulkOperation
        .When(t => t.BulkCopyDataAsync(Arg.Any<DataTable>(), Arg.Any<CancellationToken>()))
        .Do(call =>
        {
            DataTable incoming = (DataTable)call[0];
            switch (incoming.TableName)
            {
                case "Table1":
                    table1.Merge(incoming);
                    break;
                case "Table2":
                    table2.Merge(incoming);
                    break;
                case "Dup":
                    dupTable.Merge(incoming);
                    break;
            }
        });

    // Echo merged resources back while recording them.
    testFhirDataBulkOperation
        .BulkMergeResourceAsync(Arg.Any<IEnumerable<SqlBulkCopyDataWrapper>>(), Arg.Any<CancellationToken>())
        .Returns(call =>
        {
            IEnumerable<SqlBulkCopyDataWrapper> resources = (IEnumerable<SqlBulkCopyDataWrapper>)call[0];
            importedResources.AddRange(resources);

            return resources;
        });

    IImportErrorSerializer errorSerializer = Substitute.For<IImportErrorSerializer>();

    // Each resource maps to a wrapper keyed by its surrogate id.
    ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For<ISqlBulkCopyDataWrapperFactory>();
    dataWrapperFactory.CreateSqlBulkCopyDataWrapper(Arg.Any<ImportResource>())
        .Returns(callInfo =>
        {
            ImportResource resource = (ImportResource)callInfo[0];
            return new SqlBulkCopyDataWrapper()
            {
                ResourceSurrogateId = resource.Id,
            };
        });

    List<TableBulkCopyDataGenerator> generators = new List<TableBulkCopyDataGenerator>()
    {
        new TestDataGenerator("Table1", 1),
        new TestDataGenerator("Table2", 2),
        new TestDupDataGenerator("Dup"),
    };

    // Batch/checkpoint/concurrency settings under test.
    IOptions<OperationsConfiguration> operationsConfiguration = Substitute.For<IOptions<OperationsConfiguration>>();
    OperationsConfiguration operationsConfig = new OperationsConfiguration();
    operationsConfig.Import.SqlBatchSizeForImportResourceOperation = maxResourceCountInBatch;
    operationsConfig.Import.SqlMaxImportOperationConcurrentCount = maxConcurrentCount;
    operationsConfig.Import.SqlImportBatchSizeForCheckpoint = checkpointBatchCount;
    operationsConfiguration.Value.Returns(operationsConfig);

    SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger<SqlResourceBulkImporter>.Instance);

    // Collect every error batch uploaded to the error store.
    List<string> errorLogs = new List<string>();
    IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
    importErrorStore.When(t => t.UploadErrorsAsync(Arg.Any<string[]>(), Arg.Any<CancellationToken>()))
        .Do(call => errorLogs.AddRange((string[])call[0]));

    (Channel<ImportProcessingProgress> progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None);

    // Progress entries must be monotonically non-decreasing in CurrentIndex.
    ImportProcessingProgress finalProgress = new ImportProcessingProgress();
    await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync())
    {
        Assert.True(finalProgress.CurrentIndex <= progress.CurrentIndex);
        finalProgress = progress;
    }

    await importTask;

    Assert.Equal(expectedSucceedCount, finalProgress.SucceedImportCount);
    Assert.Equal(expectedFailedCount, finalProgress.FailedImportCount);
    Assert.Equal(expectedEndIndex, finalProgress.CurrentIndex);
    Assert.Equal(expectedSucceedCount, importedResources.Count);
    Assert.Equal(expectedSucceedCount, table1.Rows.Count);
    Assert.Equal(expectedSucceedCount * 2, table2.Rows.Count);
    Assert.Equal(expectedSucceedCount, dupTable.Rows.Count);
    Assert.Equal(expectedFailedCount, errorLogs.Count);
}
private async Task ImportInternalAsync(Channel<ImportResource> inputChannel, Channel<ImportProcessingProgress> outputChannel, IImportErrorStore importErrorStore, CancellationToken cancellationToken)
{
    try
    {
        _logger.LogInformation("Start to import data to SQL data store.");

        // Placeholder for the in-flight checkpoint upload; null progress means "no checkpoint yet".
        Task<ImportProcessingProgress> checkpointTask = Task.FromResult<ImportProcessingProgress>(null);

        long succeedCount = 0;
        long failedCount = 0;
        long? lastCheckpointIndex = null;
        long currentIndex = -1;
        Dictionary<string, DataTable> resourceParamsBuffer = new Dictionary<string, DataTable>();
        List<string> importErrorBuffer = new List<string>();
        Queue<Task<ImportProcessingProgress>> importTasks = new Queue<Task<ImportProcessingProgress>>();

        List<ImportResource> resourceBuffer = new List<ImportResource>();
        await _sqlBulkCopyDataWrapperFactory.EnsureInitializedAsync();
        await foreach (ImportResource resource in inputChannel.Reader.ReadAllAsync(cancellationToken))
        {
            if (cancellationToken.IsCancellationRequested)
            {
                throw new OperationCanceledException();
            }

            // First resource establishes the checkpoint baseline.
            lastCheckpointIndex = lastCheckpointIndex ?? resource.Index - 1;
            currentIndex = resource.Index;

            resourceBuffer.Add(resource);
            if (resourceBuffer.Count < _importTaskConfiguration.SqlBatchSizeForImportResourceOperation)
            {
                continue;
            }

            (long batchSucceeded, long batchFailed) = await ProcessResourceBufferAsync(resourceBuffer, resourceParamsBuffer, importErrorBuffer, cancellationToken);
            succeedCount += batchSucceeded;
            failedCount += batchFailed;

            bool shouldCreateCheckpoint = resource.Index - lastCheckpointIndex >= _importTaskConfiguration.SqlImportBatchSizeForCheckpoint;
            if (shouldCreateCheckpoint)
            {
                // Create checkpoint for all tables not empty.
                string[] tableNameNeedImport = resourceParamsBuffer.Where(r => r.Value.Rows.Count > 0).Select(r => r.Key).ToArray();
                foreach (string tableName in tableNameNeedImport)
                {
                    DataTable dataTable = resourceParamsBuffer[tableName];
                    resourceParamsBuffer.Remove(tableName);
                    await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel);
                }

                // Wait for the previous checkpoint task to complete before starting a new one.
                await checkpointTask;

                // Upload error logs for import errors accumulated since the last checkpoint.
                string[] importErrors = importErrorBuffer.ToArray();
                importErrorBuffer.Clear();
                lastCheckpointIndex = resource.Index;
                checkpointTask = await EnqueueTaskAsync(importTasks, () => UploadImportErrorsAsync(importErrorStore, succeedCount, failedCount, importErrors, currentIndex, cancellationToken), outputChannel);
            }
            else
            {
                // Only flush tables that have reached the params batch size.
                string[] tableNameNeedImport = resourceParamsBuffer.Where(r => r.Value.Rows.Count >= _importTaskConfiguration.SqlBatchSizeForImportParamsOperation).Select(r => r.Key).ToArray();
                foreach (string tableName in tableNameNeedImport)
                {
                    DataTable dataTable = resourceParamsBuffer[tableName];
                    resourceParamsBuffer.Remove(tableName);
                    await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel);
                }
            }
        }

        // Handle the final, possibly partially-filled batch.
        (long tailSucceeded, long tailFailed) = await ProcessResourceBufferAsync(resourceBuffer, resourceParamsBuffer, importErrorBuffer, cancellationToken);
        succeedCount += tailSucceeded;
        failedCount += tailFailed;

        // Import all remaining non-empty tables.
        string[] allTablesNotNull = resourceParamsBuffer.Where(r => r.Value.Rows.Count > 0).Select(r => r.Key).ToArray();
        foreach (string tableName in allTablesNotNull)
        {
            DataTable dataTable = resourceParamsBuffer[tableName];
            await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel);
        }

        // Wait for all table import tasks to complete.
        while (importTasks.Count > 0)
        {
            await importTasks.Dequeue();
        }

        // Upload remaining error logs and publish the final progress.
        ImportProcessingProgress progress = await UploadImportErrorsAsync(importErrorStore, succeedCount, failedCount, importErrorBuffer.ToArray(), currentIndex, cancellationToken);
        await outputChannel.Writer.WriteAsync(progress, cancellationToken);
    }
    finally
    {
        outputChannel.Writer.Complete();
        _logger.LogInformation("Import data to SQL data store complete.");
    }
}

// Drains resourceBuffer: merges valid resources into the store, records per-resource
// errors (including duplicates rejected by the merge), and fills the params tables.
// Disposes each resource's compressed stream and clears the buffer even on failure.
// Returns the (succeeded, failed) deltas contributed by this batch.
//
// The intermediate sequences are materialized with ToList() so the wrapper factory
// runs exactly once per resource; the original deferred query was re-enumerated by
// BulkMergeResourceAsync, Except and Count(), which both repeated the factory work
// and made Except compare freshly-created wrapper instances.
private async Task<(long SucceedCount, long FailedCount)> ProcessResourceBufferAsync(
    List<ImportResource> resourceBuffer,
    Dictionary<string, DataTable> resourceParamsBuffer,
    List<string> importErrorBuffer,
    CancellationToken cancellationToken)
{
    try
    {
        List<ImportResource> resourcesWithError = resourceBuffer.Where(r => r.ContainsError()).ToList();
        List<SqlBulkCopyDataWrapper> inputResources = resourceBuffer.Where(r => !r.ContainsError()).Select(r => _sqlBulkCopyDataWrapperFactory.CreateSqlBulkCopyDataWrapper(r)).ToList();
        List<SqlBulkCopyDataWrapper> mergedResources = (await _sqlImportOperation.BulkMergeResourceAsync(inputResources, cancellationToken)).ToList();
        List<SqlBulkCopyDataWrapper> duplicateResourcesNotMerged = inputResources.Except(mergedResources).ToList();

        importErrorBuffer.AddRange(resourcesWithError.Select(r => r.ImportError));
        FillResourceParamsBuffer(mergedResources, resourceParamsBuffer);
        AppendDuplicatedResouceErrorToBuffer(duplicateResourcesNotMerged, importErrorBuffer);

        return (mergedResources.Count, resourcesWithError.Count + duplicateResourcesNotMerged.Count);
    }
    finally
    {
        foreach (ImportResource importResource in resourceBuffer)
        {
            importResource?.CompressedStream?.Dispose();
        }

        resourceBuffer.Clear();
    }
}
public async Task CleanResourceAsync(ImportProcessingTaskInputData inputData, ImportProcessingProgress progress, CancellationToken cancellationToken)
{
    // Skip clean data step for first run.
    if (!progress.NeedCleanData)
    {
        return;
    }

    // Resume point: wipe everything from the last processed position to the end
    // of this task's sequence-id range before re-importing.
    long cleanStartId = inputData.BeginSequenceId + progress.CurrentIndex;

    try
    {
        await _sqlBulkCopyDataWrapperFactory.EnsureInitializedAsync();
        await _sqlImportOperation.CleanBatchResourceAsync(inputData.ResourceType, cleanStartId, inputData.EndSequenceId, cancellationToken);
    }
    catch (OperationCanceledException)
    {
        // Cancellation is not retriable; let it propagate unchanged.
        throw;
    }
    catch (Exception ex)
    {
        _logger.LogInformation(ex, "Failed to clean batch resource.");
        throw new RetriableTaskException("Failed to clean resource before import task start.", ex);
    }
}