        [Fact]
        public async Task GivenImportTaskInput_WhenOperationWasCancelledExceptionThrow_ThenTaskShouldBeCanceled()
        {
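            // The importer substitute throws OperationCanceledException, so the task is expected to finish with TaskResult.Canceled.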
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    throw new OperationCanceledException();
                });

            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            TaskResultData result = await task.ExecuteAsync();

            Assert.Equal(TaskResult.Canceled, result.Result);
        }
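
        // Verifies that an exception thrown while uploading error logs completes the progress channel
        // and is rethrown from the import task.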
        [Fact]
        public async Task GivenSqlBulkImporter_WhenImportDataWithUnExceptedExceptionInErrorLogUpload_ThenChannelShouldBeCompleteAndExceptionShouldThrow()
        {
            Channel<ImportResource> inputs = Channel.CreateUnbounded<ImportResource>();
            await inputs.Writer.WriteAsync(new ImportResource(0, 0, "Error message"));

            inputs.Writer.Complete();

            ISqlImportOperation testFhirDataBulkOperation = Substitute.For<ISqlImportOperation>();
            ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For<ISqlBulkCopyDataWrapperFactory>();
            IImportErrorSerializer errorSerializer = Substitute.For<IImportErrorSerializer>();
            List<TableBulkCopyDataGenerator> generators = new List<TableBulkCopyDataGenerator>();

            IOptions<OperationsConfiguration> operationsConfiguration = Substitute.For<IOptions<OperationsConfiguration>>();

            operationsConfiguration.Value.Returns(new OperationsConfiguration());

            SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger<SqlResourceBulkImporter>.Instance);

            List<string> errorLogs = new List<string>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();

            importErrorStore.UploadErrorsAsync(Arg.Any<string[]>(), Arg.Any<CancellationToken>())
                .Returns((_) => throw new InvalidOperationException());

            (Channel<ImportProcessingProgress> progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None);

            await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync())
            {
                // Do nothing...
            }

            await Assert.ThrowsAsync<InvalidOperationException>(() => importTask);
        }
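
        // Verifies that a failure while cleaning previously imported data surfaces as a RetriableTaskException.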
        [Fact]
        public async Task GivenImportTaskInput_WhenExceptionThrowForCleanData_ThenRetriableExceptionShouldBeThrow()
        {
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            importer.CleanResourceAsync(Arg.Any<ImportProcessingTaskInputData>(), Arg.Any<ImportProcessingProgress>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    throw new InvalidOperationException();
                });

            progress.NeedCleanData = true;
            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());
        }
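
        // Verifies that a failure inside the resource load task surfaces as a RetriableTaskException.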
        [Fact]
        public async Task GivenImportTaskInput_WhenExceptionThrowForLoad_ThenRetriableExceptionShouldBeThrow()
        {
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    long startIndex = (long)callInfo[1];
                    Func<long, long> idGenerator = (Func<long, long>)callInfo[3];
                    Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();

                    Task loadTask = Task.Run(() =>
                    {
                        try
                        {
                            throw new InvalidOperationException();
                        }
                        finally
                        {
                            resourceChannel.Writer.Complete();
                        }
                    });

                    return (resourceChannel, loadTask);
                });

            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());
        }
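
        // Uploads the accumulated import errors and packages the current counters into a checkpoint progress record.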
        private async Task<ImportProcessingProgress> UploadImportErrorsAsync(IImportErrorStore importErrorStore, long succeedCount, long failedCount, string[] importErrors, long lastIndex, CancellationToken cancellationToken)
        {
            try
            {
                await importErrorStore.UploadErrorsAsync(importErrors, cancellationToken);
            }
            catch (Exception ex)
            {
                _logger.LogInformation(ex, "Failed to upload error logs.");
                throw;
            }

            ImportProcessingProgress progress = new ImportProcessingProgress();

            progress.SucceedImportCount = succeedCount;
            progress.FailedImportCount = failedCount;
            progress.CurrentIndex = lastIndex + 1;

            // Return progress so the caller can record a checkpoint
            return progress;
        }
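
        // Verifies that the latest progress is persisted through the context updater before the import failure is propagated.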
        [Fact]
        public async Task GivenImportTaskInput_WhenExceptionThrowForImport_ThenContextShouldBeUpdatedBeforeFailure()
        {
            long currentIndex = 100;
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    long startIndex = (long)callInfo[1];
                    Func<long, long> idGenerator = (Func<long, long>)callInfo[3];
                    Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();
                    resourceChannel.Writer.Complete();

                    return (resourceChannel, Task.CompletedTask);
                });

            importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Channel<ImportProcessingProgress> progressChannel = Channel.CreateUnbounded<ImportProcessingProgress>();

                    Task loadTask = Task.Run(async () =>
                    {
                        try
                        {
                            ImportProcessingProgress reportedProgress = new ImportProcessingProgress();
                            reportedProgress.CurrentIndex = currentIndex;

                            await progressChannel.Writer.WriteAsync(reportedProgress);
                            throw new InvalidOperationException();
                        }
                        finally
                        {
                            progressChannel.Writer.Complete();
                        }
                    });

                    return (progressChannel, loadTask);
                });

            string context = null;

            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    context = (string)callInfo[0];

                    return Task.CompletedTask;
                });

            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());

            ImportProcessingProgress progressForContext = JsonConvert.DeserializeObject<ImportProcessingProgress>(context);

            Assert.Equal(currentIndex, progressForContext.CurrentIndex);
        }
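
        // Shared end-to-end check: runs an ImportProcessingTask against mocked loader/importer and
        // verifies counters, the persisted checkpoint context, and the range passed to clean-up.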
        private static async Task VerifyCommonImportTaskAsync(ImportProcessingTaskInputData inputData, ImportProcessingProgress progress)
        {
            long startIndexFromProgress = progress.CurrentIndex;
            long succeedCountFromProgress = progress.SucceedImportCount;
            long failedCountFromProgress = progress.FailedImportCount;

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            long cleanStart = -1;
            long cleanEnd = -1;

            importer.CleanResourceAsync(Arg.Any<ImportProcessingTaskInputData>(), Arg.Any<ImportProcessingProgress>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    var cleanInputData = (ImportProcessingTaskInputData)callInfo[0];
                    var cleanProgress = (ImportProcessingProgress)callInfo[1];
                    long beginSequenceId = cleanInputData.BeginSequenceId;
                    long endSequenceId = cleanInputData.EndSequenceId;
                    long endIndex = cleanProgress.CurrentIndex;

                    cleanStart = beginSequenceId + endIndex;
                    cleanEnd = endSequenceId;

                    return Task.CompletedTask;
                });

            loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    long startIndex = (long)callInfo[1];
                    Func<long, long> idGenerator = (Func<long, long>)callInfo[3];
                    Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();

                    Task loadTask = Task.Run(async () =>
                    {
                        ResourceWrapper resourceWrapper = new ResourceWrapper(
                            Guid.NewGuid().ToString(),
                            "0",
                            "Dummy",
                            new RawResource(Guid.NewGuid().ToString(), Fhir.Core.Models.FhirResourceFormat.Json, true),
                            new ResourceRequest("POST"),
                            DateTimeOffset.UtcNow,
                            false,
                            null,
                            null,
                            null,
                            "SearchParam");

                        await resourceChannel.Writer.WriteAsync(new ImportResource(idGenerator(startIndex), startIndex, resourceWrapper));
                        await resourceChannel.Writer.WriteAsync(new ImportResource(idGenerator(startIndex + 1), startIndex + 1, "Error"));
                        resourceChannel.Writer.Complete();
                    });

                    return (resourceChannel, loadTask);
                });

            importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Channel<ImportResource> resourceChannel = (Channel<ImportResource>)callInfo[0];
                    Channel<ImportProcessingProgress> progressChannel = Channel.CreateUnbounded<ImportProcessingProgress>();

                    Task loadTask = Task.Run(async () =>
                    {
                        ImportProcessingProgress importProgress = new ImportProcessingProgress();
                        await foreach (ImportResource resource in resourceChannel.Reader.ReadAllAsync())
                        {
                            if (string.IsNullOrEmpty(resource.ImportError))
                            {
                                importProgress.SucceedImportCount++;
                            }
                            else
                            {
                                importProgress.FailedImportCount++;
                            }

                            importProgress.CurrentIndex = resource.Index + 1;
                        }

                        await progressChannel.Writer.WriteAsync(importProgress);
                        progressChannel.Writer.Complete();
                    });

                    return (progressChannel, loadTask);
                });

            string context = null;

            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    context = (string)callInfo[0];

                    return Task.CompletedTask;
                });

            progress.NeedCleanData = true;
            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            TaskResultData taskResult = await task.ExecuteAsync();

            Assert.Equal(TaskResult.Success, taskResult.Result);
            ImportProcessingTaskResult result = JsonConvert.DeserializeObject<ImportProcessingTaskResult>(taskResult.ResultData);

            Assert.Equal(1 + failedCountFromProgress, result.FailedCount);
            Assert.Equal(1 + succeedCountFromProgress, result.SucceedCount);

            ImportProcessingProgress progressForContext = JsonConvert.DeserializeObject<ImportProcessingProgress>(context);

            Assert.Equal(progressForContext.SucceedImportCount, result.SucceedCount);
            Assert.Equal(progressForContext.FailedImportCount, result.FailedCount);
            Assert.Equal(startIndexFromProgress + 2, progressForContext.CurrentIndex);

            Assert.Equal(startIndexFromProgress, cleanStart);
            Assert.Equal(inputData.EndSequenceId, cleanEnd);
        }
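
        // Shared check for SqlResourceBulkImporter: drains the progress channel and verifies final progress,
        // imported rows per table, and uploaded error logs against the expected values.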
        private static async Task VerifyBulkImporterBehaviourAsync(Channel<ImportResource> inputs, long expectedSucceedCount, long expectedFailedCount, long expectedEndIndex, int maxResourceCountInBatch, int checkpointBatchCount, int maxConcurrentCount)
        {
            DataTable table1 = new DataTable();
            DataTable table2 = new DataTable();
            DataTable dupTable = new DataTable();
            List<SqlBulkCopyDataWrapper> importedResources = new List<SqlBulkCopyDataWrapper>();

            ISqlImportOperation testFhirDataBulkOperation = Substitute.For<ISqlImportOperation>();

            testFhirDataBulkOperation
            .When(t => t.BulkCopyDataAsync(Arg.Any<DataTable>(), Arg.Any<CancellationToken>()))
            .Do(call =>
            {
                DataTable table = (DataTable)call[0];
                if (table.TableName.Equals("Table1"))
                {
                    table1.Merge(table);
                }
                else if (table.TableName.Equals("Table2"))
                {
                    table2.Merge(table);
                }
                else if (table.TableName.Equals("Dup"))
                {
                    dupTable.Merge(table);
                }
            });
            testFhirDataBulkOperation
                .BulkMergeResourceAsync(Arg.Any<IEnumerable<SqlBulkCopyDataWrapper>>(), Arg.Any<CancellationToken>())
                .Returns(call =>
                {
                    IEnumerable<SqlBulkCopyDataWrapper> resources = (IEnumerable<SqlBulkCopyDataWrapper>)call[0];
                    importedResources.AddRange(resources);

                    return resources;
                });

            IImportErrorSerializer errorSerializer = Substitute.For<IImportErrorSerializer>();
            ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For<ISqlBulkCopyDataWrapperFactory>();

            dataWrapperFactory.CreateSqlBulkCopyDataWrapper(Arg.Any<ImportResource>())
                .Returns(callInfo =>
                {
                    ImportResource resource = (ImportResource)callInfo[0];
                    return new SqlBulkCopyDataWrapper()
                    {
                        ResourceSurrogateId = resource.Id,
                    };
                });

            List<TableBulkCopyDataGenerator> generators = new List<TableBulkCopyDataGenerator>()
            {
                new TestDataGenerator("Table1", 1),
                new TestDataGenerator("Table2", 2),
                new TestDupDataGenerator("Dup"),
            };

            IOptions<OperationsConfiguration> operationsConfiguration = Substitute.For<IOptions<OperationsConfiguration>>();
            OperationsConfiguration operationsConfig = new OperationsConfiguration();

            operationsConfig.Import.SqlBatchSizeForImportResourceOperation = maxResourceCountInBatch;
            operationsConfig.Import.SqlMaxImportOperationConcurrentCount = maxConcurrentCount;
            operationsConfig.Import.SqlImportBatchSizeForCheckpoint = checkpointBatchCount;
            operationsConfiguration.Value.Returns(operationsConfig);

            SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger<SqlResourceBulkImporter>.Instance);

            List<string> errorLogs = new List<string>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();

            importErrorStore.When(t => t.UploadErrorsAsync(Arg.Any<string[]>(), Arg.Any<CancellationToken>()))
                .Do(call =>
                {
                    string[] errors = (string[])call[0];
                    errorLogs.AddRange(errors);
                });

            (Channel<ImportProcessingProgress> progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None);
            ImportProcessingProgress finalProgress = new ImportProcessingProgress();

            await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync())
            {
                Assert.True(finalProgress.CurrentIndex <= progress.CurrentIndex);
                finalProgress = progress;
            }

            await importTask;

            Assert.Equal(expectedSucceedCount, finalProgress.SucceedImportCount);
            Assert.Equal(expectedFailedCount, finalProgress.FailedImportCount);
            Assert.Equal(expectedEndIndex, finalProgress.CurrentIndex);

            Assert.Equal(expectedSucceedCount, importedResources.Count);
            Assert.Equal(expectedSucceedCount, table1.Rows.Count);
            Assert.Equal(expectedSucceedCount * 2, table2.Rows.Count);
            Assert.Equal(expectedSucceedCount, dupTable.Rows.Count);
            Assert.Equal(expectedFailedCount, errorLogs.Count);
        }
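
        // Verifies that an exception thrown by the SQL bulk copy operation completes the progress channel
        // and is rethrown from the import task.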
        [Fact]
        public async Task GivenSqlBulkImporter_WhenImportDataWithUnExceptedExceptionInBulkOpertation_ThenChannelShouldBeCompleteAndExceptionShouldThrow()
        {
            Channel<ImportResource> inputs = Channel.CreateUnbounded<ImportResource>();
            await inputs.Writer.WriteAsync(new ImportResource(0, 0, default(ResourceWrapper)));

            inputs.Writer.Complete();

            ISqlImportOperation testFhirDataBulkOperation = Substitute.For<ISqlImportOperation>();

            testFhirDataBulkOperation
                .BulkCopyDataAsync(Arg.Any<DataTable>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    throw new InvalidOperationException();
                });
            testFhirDataBulkOperation
                .BulkMergeResourceAsync(Arg.Any<IEnumerable<SqlBulkCopyDataWrapper>>(), Arg.Any<CancellationToken>())
                .Returns(call =>
                {
                    IEnumerable<SqlBulkCopyDataWrapper> resources = (IEnumerable<SqlBulkCopyDataWrapper>)call[0];

                    return resources;
                });

            IImportErrorSerializer errorSerializer = Substitute.For<IImportErrorSerializer>();
            ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For<ISqlBulkCopyDataWrapperFactory>();

            dataWrapperFactory.CreateSqlBulkCopyDataWrapper(Arg.Any<ImportResource>())
                .Returns(callInfo =>
                {
                    ImportResource resource = (ImportResource)callInfo[0];
                    return new SqlBulkCopyDataWrapper()
                    {
                        ResourceSurrogateId = resource.Id,
                    };
                });

            List<TableBulkCopyDataGenerator> generators = new List<TableBulkCopyDataGenerator>()
            {
                new TestDataGenerator("Table1", 1),
                new TestDataGenerator("Table2", 2),
            };

            IOptions<OperationsConfiguration> operationsConfiguration = Substitute.For<IOptions<OperationsConfiguration>>();

            operationsConfiguration.Value.Returns(new OperationsConfiguration());

            SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger<SqlResourceBulkImporter>.Instance);

            List<string> errorLogs = new List<string>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();

            (Channel<ImportProcessingProgress> progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None);

            await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync())
            {
                // Do nothing...
            }

            await Assert.ThrowsAsync<InvalidOperationException>(() => importTask);
        }
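
        // Reads resources from the input channel, bulk-merges them into SQL in batches, buffers per-table rows for bulk copy,
        // uploads import errors, and writes checkpoint progress to the output channel.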
        private async Task ImportInternalAsync(Channel<ImportResource> inputChannel, Channel<ImportProcessingProgress> outputChannel, IImportErrorStore importErrorStore, CancellationToken cancellationToken)
        {
            try
            {
                _logger.LogInformation("Start to import data to SQL data store.");

                Task<ImportProcessingProgress> checkpointTask = Task.FromResult<ImportProcessingProgress>(null);

                long succeedCount = 0;
                long failedCount = 0;
                long? lastCheckpointIndex = null;
                long currentIndex = -1;
                Dictionary<string, DataTable> resourceParamsBuffer = new Dictionary<string, DataTable>();
                List<string> importErrorBuffer = new List<string>();
                Queue<Task<ImportProcessingProgress>> importTasks = new Queue<Task<ImportProcessingProgress>>();

                List<ImportResource> resourceBuffer = new List<ImportResource>();
                await _sqlBulkCopyDataWrapperFactory.EnsureInitializedAsync();

                await foreach (ImportResource resource in inputChannel.Reader.ReadAllAsync(cancellationToken))
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        throw new OperationCanceledException();
                    }

                    lastCheckpointIndex = lastCheckpointIndex ?? resource.Index - 1;
                    currentIndex = resource.Index;

                    resourceBuffer.Add(resource);
                    if (resourceBuffer.Count < _importTaskConfiguration.SqlBatchSizeForImportResourceOperation)
                    {
                        continue;
                    }

                    try
                    {
                        // Handle resources in buffer
                        IEnumerable<ImportResource> resourcesWithError = resourceBuffer.Where(r => r.ContainsError());
                        IEnumerable<SqlBulkCopyDataWrapper> inputResources = resourceBuffer.Where(r => !r.ContainsError()).Select(r => _sqlBulkCopyDataWrapperFactory.CreateSqlBulkCopyDataWrapper(r));
                        IEnumerable<SqlBulkCopyDataWrapper> mergedResources = await _sqlImportOperation.BulkMergeResourceAsync(inputResources, cancellationToken);

                        IEnumerable<SqlBulkCopyDataWrapper> duplicateResourcesNotMerged = inputResources.Except(mergedResources);

                        importErrorBuffer.AddRange(resourcesWithError.Select(r => r.ImportError));
                        FillResourceParamsBuffer(mergedResources, resourceParamsBuffer);
                        AppendDuplicatedResouceErrorToBuffer(duplicateResourcesNotMerged, importErrorBuffer);

                        succeedCount += mergedResources.Count();
                        failedCount += resourcesWithError.Count() + duplicateResourcesNotMerged.Count();
                    }
                    finally
                    {
                        foreach (ImportResource importResource in resourceBuffer)
                        {
                            importResource?.CompressedStream?.Dispose();
                        }

                        resourceBuffer.Clear();
                    }

                    bool shouldCreateCheckpoint = resource.Index - lastCheckpointIndex >= _importTaskConfiguration.SqlImportBatchSizeForCheckpoint;
                    if (shouldCreateCheckpoint)
                    {
                        // Create a checkpoint: flush all non-empty parameter tables
                        string[] tableNameNeedImport = resourceParamsBuffer.Where(r => r.Value.Rows.Count > 0).Select(r => r.Key).ToArray();

                        foreach (string tableName in tableNameNeedImport)
                        {
                            DataTable dataTable = resourceParamsBuffer[tableName];
                            resourceParamsBuffer.Remove(tableName);
                            await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel);
                        }

                        // Wait for the previous checkpoint task to complete
                        await checkpointTask;

                        // Upload error logs collected since the last checkpoint
                        string[] importErrors = importErrorBuffer.ToArray();
                        importErrorBuffer.Clear();
                        lastCheckpointIndex = resource.Index;
                        checkpointTask = await EnqueueTaskAsync(importTasks, () => UploadImportErrorsAsync(importErrorStore, succeedCount, failedCount, importErrors, currentIndex, cancellationToken), outputChannel);
                    }
                    else
                    {
                        // Import parameter tables whose buffered row count has reached the batch size
                        string[] tableNameNeedImport =
                            resourceParamsBuffer.Where(r => r.Value.Rows.Count >= _importTaskConfiguration.SqlBatchSizeForImportParamsOperation).Select(r => r.Key).ToArray();

                        foreach (string tableName in tableNameNeedImport)
                        {
                            DataTable dataTable = resourceParamsBuffer[tableName];
                            resourceParamsBuffer.Remove(tableName);
                            await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel);
                        }
                    }
                }

                try
                {
                    // Handle resources in buffer
                    IEnumerable<ImportResource> resourcesWithError = resourceBuffer.Where(r => r.ContainsError());
                    IEnumerable<SqlBulkCopyDataWrapper> inputResources = resourceBuffer.Where(r => !r.ContainsError()).Select(r => _sqlBulkCopyDataWrapperFactory.CreateSqlBulkCopyDataWrapper(r));
                    IEnumerable<SqlBulkCopyDataWrapper> mergedResources = await _sqlImportOperation.BulkMergeResourceAsync(inputResources, cancellationToken);

                    IEnumerable<SqlBulkCopyDataWrapper> duplicateResourcesNotMerged = inputResources.Except(mergedResources);
                    importErrorBuffer.AddRange(resourcesWithError.Select(r => r.ImportError));

                    FillResourceParamsBuffer(mergedResources, resourceParamsBuffer);

                    AppendDuplicatedResouceErrorToBuffer(duplicateResourcesNotMerged, importErrorBuffer);
                    succeedCount += mergedResources.Count();
                    failedCount += resourcesWithError.Count() + duplicateResourcesNotMerged.Count();
                }
                finally
                {
                    foreach (ImportResource importResource in resourceBuffer)
                    {
                        importResource?.CompressedStream?.Dispose();
                    }

                    resourceBuffer.Clear();
                }

                // Import all remaining non-empty tables
                string[] allTablesNotNull = resourceParamsBuffer.Where(r => r.Value.Rows.Count > 0).Select(r => r.Key).ToArray();
                foreach (string tableName in allTablesNotNull)
                {
                    DataTable dataTable = resourceParamsBuffer[tableName];
                    await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel);
                }

                // Wait for all table import tasks to complete
                while (importTasks.Count > 0)
                {
                    await importTasks.Dequeue();
                }

                // Upload remaining error logs
                ImportProcessingProgress progress = await UploadImportErrorsAsync(importErrorStore, succeedCount, failedCount, importErrorBuffer.ToArray(), currentIndex, cancellationToken);

                await outputChannel.Writer.WriteAsync(progress, cancellationToken);
            }
            finally
            {
                outputChannel.Writer.Complete();
                _logger.LogInformation("Import data to SQL data store complete.");
            }
        }
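
        // Starts the import pipeline on a background task and returns the progress channel together with the running task.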
        public (Channel<ImportProcessingProgress> progressChannel, Task importTask) Import(Channel<ImportResource> inputChannel, IImportErrorStore importErrorStore, CancellationToken cancellationToken)
        {
            Channel<ImportProcessingProgress> outputChannel = Channel.CreateUnbounded<ImportProcessingProgress>();

            Task importTask = Task.Run(
                async () =>
                {
                    await ImportInternalAsync(inputChannel, outputChannel, importErrorStore, cancellationToken);
                },
                cancellationToken);

            return (outputChannel, importTask);
        }
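
        // Orchestrates a single import processing task: sets up the request context, cleans data from a previous attempt,
        // loads and imports resources, persists checkpoint progress, and maps cancellation and failures to task results.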
        public async Task<TaskResultData> ExecuteAsync()
        {
            var fhirRequestContext = new FhirRequestContext(
                method: "Import",
                uriString: _inputData.UriString,
                baseUriString: _inputData.BaseUriString,
                correlationId: _inputData.TaskId,
                requestHeaders: new Dictionary<string, StringValues>(),
                responseHeaders: new Dictionary<string, StringValues>())
            {
                IsBackgroundTask = true,
            };

            _contextAccessor.RequestContext = fhirRequestContext;

            CancellationToken cancellationToken = _cancellationTokenSource.Token;

            long succeedImportCount = _importProgress.SucceedImportCount;
            long failedImportCount = _importProgress.FailedImportCount;

            ImportProcessingTaskResult result = new ImportProcessingTaskResult();

            result.ResourceType = _inputData.ResourceType;

            try
            {
                if (cancellationToken.IsCancellationRequested)
                {
                    throw new OperationCanceledException();
                }

                Func <long, long> sequenceIdGenerator = (index) => _inputData.BeginSequenceId + index;

                // Clean partially imported resources from any previous attempt before the import starts
                await _resourceBulkImporter.CleanResourceAsync(_inputData, _importProgress, cancellationToken);

                _importProgress.NeedCleanData = true;
                await _contextUpdater.UpdateContextAsync(JsonConvert.SerializeObject(_importProgress), cancellationToken);

                // Initialize error store
                IImportErrorStore importErrorStore = await _importErrorStoreFactory.InitializeAsync(GetErrorFileName(), cancellationToken);

                result.ErrorLogLocation = importErrorStore.ErrorFileLocation;

                // Load and parse resources from the bulk import source
                (Channel<ImportResource> importResourceChannel, Task loadTask) = _importResourceLoader.LoadResources(_inputData.ResourceLocation, _importProgress.CurrentIndex, _inputData.ResourceType, sequenceIdGenerator, cancellationToken);

                // Import to the data store
                (Channel<ImportProcessingProgress> progressChannel, Task importTask) = _resourceBulkImporter.Import(importResourceChannel, importErrorStore, cancellationToken);

                // Update progress for checkpoints
                await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync())
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        throw new OperationCanceledException("Import task is canceled by user.");
                    }

                    _importProgress.SucceedImportCount = progress.SucceedImportCount + succeedImportCount;
                    _importProgress.FailedImportCount = progress.FailedImportCount + failedImportCount;
                    _importProgress.CurrentIndex = progress.CurrentIndex;
                    result.SucceedCount = _importProgress.SucceedImportCount;
                    result.FailedCount = _importProgress.FailedImportCount;

                    _logger.LogInformation("Import task progress: {0}", JsonConvert.SerializeObject(_importProgress));

                    try
                    {
                        await _contextUpdater.UpdateContextAsync(JsonConvert.SerializeObject(_importProgress), cancellationToken);
                    }
                    catch (Exception ex)
                    {
                        // Ignore exceptions from progress updates
                        _logger.LogInformation(ex, "Failed to update context.");
                    }
                }

                // Surface exceptions from the load & import tasks.
                // Await the import task before the load task so a full resource channel cannot block the load task indefinitely.
                try
                {
                    await importTask;
                }
                catch (TaskCanceledException)
                {
                    throw;
                }
                catch (OperationCanceledException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "Failed to import data.");
                    throw new RetriableTaskException("Failed to import data.", ex);
                }

                try
                {
                    await loadTask;
                }
                catch (TaskCanceledException)
                {
                    throw;
                }
                catch (OperationCanceledException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "Failed to load data.");
                    throw new RetriableTaskException("Failed to load data", ex);
                }

                return new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(result));
            }
            catch (TaskCanceledException canceledEx)
            {
                _logger.LogInformation(canceledEx, "Data processing task is canceled.");

                await CleanResourceForFailureAsync(canceledEx);

                return new TaskResultData(TaskResult.Canceled, JsonConvert.SerializeObject(result));
            }
            catch (OperationCanceledException canceledEx)
            {
                _logger.LogInformation(canceledEx, "Data processing task is canceled.");

                await CleanResourceForFailureAsync(canceledEx);

                return new TaskResultData(TaskResult.Canceled, JsonConvert.SerializeObject(result));
            }
            catch (RetriableTaskException retriableEx)
            {
                _logger.LogInformation(retriableEx, "Error in data processing task.");

                await CleanResourceForFailureAsync(retriableEx);

                throw;
            }
            catch (Exception ex)
            {
                _logger.LogInformation(ex, "Critical error in data processing task.");

                await CleanResourceForFailureAsync(ex);

                throw new RetriableTaskException(ex.Message);
            }
            finally
            {
                if (!_cancellationTokenSource.IsCancellationRequested)
                {
                    _cancellationTokenSource.Cancel();
                }
            }
        }