public async Task Invoke(HttpContext context, IFhirRequestContextAccessor fhirRequestContextAccessor, CorrelationIdProvider correlationIdProvider)
{
    HttpRequest incomingRequest = context.Request;

    // Compose the service base URI (scheme + host + path base) and the full request URI.
    string serviceBaseUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase);
    string fullRequestUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase, incomingRequest.Path, incomingRequest.QueryString);

    string requestCorrelationId = correlationIdProvider.Invoke();

    var requestContext = new FhirRequestContext(
        method: incomingRequest.Method,
        uriString: fullRequestUri,
        baseUriString: serviceBaseUri,
        correlationId: requestCorrelationId,
        requestHeaders: context.Request.Headers,
        responseHeaders: context.Response.Headers);

    // Surface the correlation id to the caller through the response headers.
    context.Response.Headers[RequestIdHeaderName] = requestCorrelationId;

    fhirRequestContextAccessor.FhirRequestContext = requestContext;

    // Call the next delegate/middleware in the pipeline
    await _next(context);
}
public Task Invoke(HttpContext context, IFhirRequestContextAccessor fhirRequestContextAccessor, CorrelationIdProvider correlationIdProvider)
{
    HttpRequest incomingRequest = context.Request;

    // Compose the service base URI (scheme + host + path base) and the full request URI.
    string serviceBaseUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase);
    string fullRequestUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase, incomingRequest.Path, incomingRequest.QueryString);

    var requestContext = new FhirRequestContext(
        method: incomingRequest.Method,
        uriString: fullRequestUri,
        baseUriString: serviceBaseUri,
        requestType: ValueSets.AuditEventType.RestFulOperation,
        correlationId: correlationIdProvider.Invoke(),
        requestHeaders: context.Request.Headers,
        responseHeaders: context.Response.Headers);

    // Carry the authenticated principal (if any) onto the FHIR request context.
    if (context.User != null)
    {
        requestContext.Principal = context.User;
    }

    fhirRequestContextAccessor.FhirRequestContext = requestContext;

    // Call the next delegate/middleware in the pipeline
    return _next(context);
}
public Task Invoke(HttpContext context, IFhirRequestContextAccessor fhirRequestContextAccessor, CorrelationIdProvider correlationIdProvider)
{
    HttpRequest incomingRequest = context.Request;

    // Compose the service base URI (scheme + host + path base) and the full request URI.
    string serviceBaseUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase);
    string fullRequestUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase, incomingRequest.Path, incomingRequest.QueryString);

    string requestCorrelationId = correlationIdProvider.Invoke();

    // Pull the resource type out of the route values when routing has already populated them.
    object resourceType = null;
    RouteData routeData = context.GetRouteData();
    if (routeData?.Values != null)
    {
        routeData.Values.TryGetValue(KnownActionParameterNames.ResourceType, out resourceType);
    }

    var requestContext = new FhirRequestContext(
        method: incomingRequest.Method,
        uriString: fullRequestUri,
        baseUriString: serviceBaseUri,
        correlationId: requestCorrelationId,
        requestHeaders: context.Request.Headers,
        responseHeaders: context.Response.Headers,
        resourceType: resourceType?.ToString());

    // Surface the correlation id to the caller through the response headers.
    context.Response.Headers[RequestIdHeaderName] = requestCorrelationId;

    // Note that if this is executed before authentication occurs, the user will not contain any claims.
    if (context.User != null)
    {
        requestContext.Principal = context.User;
    }

    fhirRequestContextAccessor.FhirRequestContext = requestContext;

    // Call the next delegate/middleware in the pipeline
    return _next(context);
}
public ResourceWrapperFactoryTests()
{
    var jsonSerializer = new FhirJsonSerializer();
    _rawResourceFactory = new RawResourceFactory(jsonSerializer);

    // A placeholder request context so the factory can resolve request-scoped data.
    var placeholderContext = new FhirRequestContext(
        "POST",
        "https://localhost/Patient",
        "https://localhost/",
        Guid.NewGuid().ToString(),
        new Dictionary<string, StringValues>(),
        new Dictionary<string, StringValues>());

    _fhirRequestContextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
    _fhirRequestContextAccessor.RequestContext.Returns(placeholderContext);

    _searchIndexer = Substitute.For<ISearchIndexer>();
    _claimsExtractor = Substitute.For<IClaimsExtractor>();
    _compartmentIndexer = Substitute.For<ICompartmentIndexer>();

    _searchParameterDefinitionManager = Substitute.For<ISearchParameterDefinitionManager>();
    _searchParameterDefinitionManager.GetSearchParameterHashForResourceType(Arg.Any<string>()).Returns("hash");

    _resourceWrapperFactory = new ResourceWrapperFactory(
        _rawResourceFactory,
        _fhirRequestContextAccessor,
        _searchIndexer,
        _claimsExtractor,
        _compartmentIndexer,
        _searchParameterDefinitionManager,
        Deserializers.ResourceDeserializer);

    // Sort-enabled string parameters plus a sort-supported (but not enabled) numeric parameter.
    _nameSearchParameterInfo = new SearchParameterInfo("name", "name", ValueSets.SearchParamType.String, new Uri("https://localhost/searchParameter/name"))
    {
        SortStatus = SortParameterStatus.Enabled,
    };
    _addressSearchParameterInfo = new SearchParameterInfo("address-city", "address-city", ValueSets.SearchParamType.String, new Uri("https://localhost/searchParameter/address-city"))
    {
        SortStatus = SortParameterStatus.Enabled,
    };
    _ageSearchParameterInfo = new SearchParameterInfo("age", "age", ValueSets.SearchParamType.Number, new Uri("https://localhost/searchParameter/age"))
    {
        SortStatus = SortParameterStatus.Supported,
    };
}
public ReindexJobCosmosThrottleControllerTests(ITestOutputHelper output)
{
    _output = output;

    _fhirRequestContextAccessor = new FhirRequestContextAccessor();

    // Background reindex request context shared by every test in this class.
    _fhirRequestContextAccessor.RequestContext = new FhirRequestContext(
        method: OperationsConstants.Reindex,
        uriString: "$reindex",
        baseUriString: "$reindex",
        correlationId: "id",
        requestHeaders: new Dictionary<string, StringValues>(),
        responseHeaders: new Dictionary<string, StringValues>())
    {
        IsBackgroundTask = true,
        AuditEventType = OperationsConstants.Reindex,
    };
}
public Task Invoke(HttpContext context, IFhirRequestContextAccessor fhirRequestContextAccessor, CorrelationIdProvider correlationIdProvider)
{
    HttpRequest incomingRequest = context.Request;

    // Compose the service base URI (scheme + host + path base) and the full request URI.
    string serviceBaseUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase);
    string fullRequestUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase, incomingRequest.Path, incomingRequest.QueryString);

    string requestCorrelationId = correlationIdProvider.Invoke();

    var requestContext = new FhirRequestContext(
        method: incomingRequest.Method,
        uriString: fullRequestUri,
        baseUriString: serviceBaseUri,
        requestType: new CodingInfo(AuditEventType.System, AuditEventType.RestFulOperationCode),
        correlationId: requestCorrelationId,
        requestHeaders: context.Request.Headers,
        responseHeaders: context.Response.Headers);

    // Surface the correlation id to the caller through the response headers.
    context.Response.Headers[RequestIdHeaderName] = requestCorrelationId;

    // Note that if this is executed before authentication occurs, the user will not contain any claims.
    if (context.User != null)
    {
        requestContext.Principal = context.User;
    }

    fhirRequestContextAccessor.FhirRequestContext = requestContext;

    // Call the next delegate/middleware in the pipeline
    return _next(context);
}
public async Task Invoke(HttpContext context, RequestContextAccessor<IFhirRequestContext> fhirRequestContextAccessor, CorrelationIdProvider correlationIdProvider)
{
    HttpRequest incomingRequest = context.Request;

    // Compose the service base URI (scheme + host + path base) and the full request URI.
    string serviceBaseUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase);
    string fullRequestUri = UriHelper.BuildAbsolute(incomingRequest.Scheme, incomingRequest.Host, incomingRequest.PathBase, incomingRequest.Path, incomingRequest.QueryString);

    string requestCorrelationId = correlationIdProvider.Invoke();

    // https://www.hl7.org/fhir/http.html#custom
    // If X-Request-Id header is present, then put it value into X-Correlation-Id header for response.
    if (context.Request.Headers.TryGetValue(KnownHeaders.RequestId, out var clientSuppliedRequestId) && !string.IsNullOrEmpty(clientSuppliedRequestId))
    {
        context.Response.Headers[KnownHeaders.CorrelationId] = clientSuppliedRequestId;
    }

    var requestContext = new FhirRequestContext(
        method: incomingRequest.Method,
        uriString: fullRequestUri,
        baseUriString: serviceBaseUri,
        correlationId: requestCorrelationId,
        requestHeaders: context.Request.Headers,
        responseHeaders: context.Response.Headers);

    // The server-generated correlation id is always echoed back as X-Request-Id.
    context.Response.Headers[KnownHeaders.RequestId] = requestCorrelationId;

    fhirRequestContextAccessor.RequestContext = requestContext;

    // Call the next delegate/middleware in the pipeline
    await _next(context);
}
/// <inheritdoc />
/// <remarks>
/// Runs an export job end to end: validates storage configuration, connects to the destination,
/// performs the export search, and records success/failure on the job record. The ambient FHIR
/// request context is swapped for a background-task context for the duration of the run and
/// restored in the finally block.
/// </remarks>
public async Task ExecuteAsync(ExportJobRecord exportJobRecord, WeakETag weakETag, CancellationToken cancellationToken)
{
    EnsureArg.IsNotNull(exportJobRecord, nameof(exportJobRecord));

    _exportJobRecord = exportJobRecord;
    _weakETag = weakETag;
    _fileManager = new ExportFileManager(_exportJobRecord, _exportDestinationClient);

    var existingFhirRequestContext = _contextAccessor.FhirRequestContext;

    try
    {
        ExportJobConfiguration exportJobConfiguration = _exportJobConfiguration;

        // Add a request context so that bundle issues can be added by the SearchOptionFactory
        var fhirRequestContext = new FhirRequestContext(
            method: "Export",
            uriString: "$export",
            baseUriString: "$export",
            correlationId: _exportJobRecord.Id,
            requestHeaders: new Dictionary<string, StringValues>(),
            responseHeaders: new Dictionary<string, StringValues>())
        {
            IsBackgroundTask = true,
        };

        _contextAccessor.FhirRequestContext = fhirRequestContext;

        string connectionHash = string.IsNullOrEmpty(_exportJobConfiguration.StorageAccountConnection) ?
            string.Empty :
            Health.Core.Extensions.StringExtensions.ComputeHash(_exportJobConfiguration.StorageAccountConnection);

        if (string.IsNullOrEmpty(exportJobRecord.StorageAccountUri))
        {
            // Connection-string based destination: fail if the configured connection changed mid-job.
            if (!string.Equals(exportJobRecord.StorageAccountConnectionHash, connectionHash, StringComparison.Ordinal))
            {
                throw new DestinationConnectionException("Storage account connection string was updated during an export job.", HttpStatusCode.BadRequest);
            }
        }
        else
        {
            // URI based destination: build a minimal configuration targeting the job's own storage URI.
            exportJobConfiguration = new ExportJobConfiguration();
            exportJobConfiguration.Enabled = _exportJobConfiguration.Enabled;
            exportJobConfiguration.StorageAccountUri = exportJobRecord.StorageAccountUri;
        }

        if (_exportJobRecord.Filters != null &&
            _exportJobRecord.Filters.Count > 0 &&
            string.IsNullOrEmpty(_exportJobRecord.ResourceType))
        {
            throw new BadRequestException(Resources.TypeFilterWithoutTypeIsUnsupported);
        }

        // Connect to export destination using appropriate client.
        await _exportDestinationClient.ConnectAsync(exportJobConfiguration, cancellationToken, _exportJobRecord.StorageAccountContainerName);

        // If we are resuming a job, we can detect that by checking the progress info from the job record.
        // If it is null, then we know we are processing a new job.
        if (_exportJobRecord.Progress == null)
        {
            _exportJobRecord.Progress = new ExportJobProgress(continuationToken: null, page: 0);
        }

        // The intial list of query parameters will not have a continutation token. We will add that later if we get one back
        // from the search result.
        var queryParametersList = new List<Tuple<string, string>>()
        {
            Tuple.Create(KnownQueryParameterNames.Count, _exportJobRecord.MaximumNumberOfResourcesPerQuery.ToString(CultureInfo.InvariantCulture)),
            Tuple.Create(KnownQueryParameterNames.LastUpdated, $"le{_exportJobRecord.QueuedTime.ToString("o", CultureInfo.InvariantCulture)}"),
        };

        if (_exportJobRecord.Since != null)
        {
            queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.LastUpdated, $"ge{_exportJobRecord.Since}"));
        }

        ExportJobProgress progress = _exportJobRecord.Progress;

        await RunExportSearch(exportJobConfiguration, progress, queryParametersList, cancellationToken);

        await CompleteJobAsync(OperationStatus.Completed, cancellationToken);

        _logger.LogTrace("Successfully completed the job.");
    }
    catch (JobConflictException)
    {
        // The export job was updated externally. There might be some additional resources that were exported
        // but we will not be updating the job record.
        _logger.LogTrace("The job was updated by another process.");
    }
    catch (DestinationConnectionException dce)
    {
        _logger.LogError(dce, "Can't connect to destination. The job will be marked as failed.");

        _exportJobRecord.FailureDetails = new JobFailureDetails(dce.Message, dce.StatusCode);
        await CompleteJobAsync(OperationStatus.Failed, cancellationToken);
    }
    catch (ResourceNotFoundException rnfe)
    {
        _logger.LogError(rnfe, "Can't find specified resource. The job will be marked as failed.");

        _exportJobRecord.FailureDetails = new JobFailureDetails(rnfe.Message, HttpStatusCode.BadRequest);
        await CompleteJobAsync(OperationStatus.Failed, cancellationToken);
    }
    catch (FailedToParseAnonymizationConfigurationException ex)
    {
        _logger.LogError(ex, "Failed to parse anonymization configuration. The job will be marked as failed.");

        _exportJobRecord.FailureDetails = new JobFailureDetails(ex.Message, HttpStatusCode.BadRequest);
        await CompleteJobAsync(OperationStatus.Failed, cancellationToken);
    }
    catch (AnonymizationConfigurationNotFoundException ex)
    {
        // BUGFIX: log message grammar ("Cannot found" -> "Cannot find").
        _logger.LogError(ex, "Cannot find anonymization configuration. The job will be marked as failed.");

        _exportJobRecord.FailureDetails = new JobFailureDetails(ex.Message, HttpStatusCode.BadRequest);
        await CompleteJobAsync(OperationStatus.Failed, cancellationToken);
    }
    catch (AnonymizationConfigurationFetchException ex)
    {
        _logger.LogError(ex, "Failed to fetch anonymization configuration file. The job will be marked as failed.");

        _exportJobRecord.FailureDetails = new JobFailureDetails(ex.Message, HttpStatusCode.BadRequest);
        await CompleteJobAsync(OperationStatus.Failed, cancellationToken);
    }
    catch (Exception ex)
    {
        // The job has encountered an error it cannot recover from.
        // Try to update the job to failed state.
        _logger.LogError(ex, "Encountered an unhandled exception. The job will be marked as failed.");

        _exportJobRecord.FailureDetails = new JobFailureDetails(Resources.UnknownError, HttpStatusCode.InternalServerError);
        await CompleteJobAsync(OperationStatus.Failed, cancellationToken);
    }
    finally
    {
        // Always restore the caller's request context, whatever the outcome of the job.
        _contextAccessor.FhirRequestContext = existingFhirRequestContext;
    }
}
/// <inheritdoc />
/// <remarks>
/// Runs a reindex job: seeds the query list on a fresh job, then drives per-resource-type
/// queries in parallel (bounded by MaximumConcurrency) until all are finished, throttling
/// against datastore consumption. The ambient FHIR request context is swapped for a
/// background-task context for the duration of the run and restored in the finally block.
/// </remarks>
public async Task ExecuteAsync(ReindexJobRecord reindexJobRecord, WeakETag weakETag, CancellationToken cancellationToken)
{
    EnsureArg.IsNotNull(reindexJobRecord, nameof(reindexJobRecord));
    EnsureArg.IsNotNull(weakETag, nameof(weakETag));

    _reindexJobRecord = reindexJobRecord;
    _weakETag = weakETag;

    // Serializes job-record updates across the concurrently running query tasks.
    var jobSemaphore = new SemaphoreSlim(1, 1);

    var existingFhirRequestContext = _contextAccessor.FhirRequestContext;

    try
    {
        // Add a request context so Datastore consumption can be added
        var fhirRequestContext = new FhirRequestContext(
            method: OperationsConstants.Reindex,
            uriString: "$reindex",
            baseUriString: "$reindex",
            correlationId: _reindexJobRecord.Id,
            requestHeaders: new Dictionary<string, StringValues>(),
            responseHeaders: new Dictionary<string, StringValues>())
        {
            IsBackgroundTask = true,
            AuditEventType = OperationsConstants.Reindex,
        };

        _contextAccessor.FhirRequestContext = fhirRequestContext;

        using (IScoped<IFhirDataStore> store = _fhirDataStoreFactory())
        {
            var provisionedCapacity = await store.Value.GetProvisionedDataStoreCapacityAsync(cancellationToken);
            _throttleController.Initialize(_reindexJobRecord, provisionedCapacity);
        }

        if (_reindexJobRecord.Status != OperationStatus.Running || _reindexJobRecord.StartTime == null)
        {
            // update job record to running
            _reindexJobRecord.Status = OperationStatus.Running;
            _reindexJobRecord.StartTime = Clock.UtcNow;
            await UpdateJobAsync(cancellationToken);
        }

        // If we are resuming a job, we can detect that by checking the progress info from the job record.
        // If no queries have been added to the progress then this is a new job
        if (_reindexJobRecord.QueryList?.Count == 0)
        {
            // Build query based on new search params
            // Find supported, but not yet searchable params
            var notYetIndexedParams = _supportedSearchParameterDefinitionManager.GetSearchParametersRequiringReindexing();

            // if there are not any parameters which are supported but not yet indexed, then we have nothing to do
            if (!notYetIndexedParams.Any())
            {
                _reindexJobRecord.Error.Add(new OperationOutcomeIssue(
                    OperationOutcomeConstants.IssueSeverity.Information,
                    OperationOutcomeConstants.IssueType.Informational,
                    Resources.NoSearchParametersNeededToBeIndexed));
                _reindexJobRecord.CanceledTime = DateTimeOffset.UtcNow;
                await CompleteJobAsync(OperationStatus.Canceled, cancellationToken);
                return;
            }

            // From the param list, get the list of necessary resources which should be
            // included in our query
            var resourceList = new HashSet<string>();
            foreach (var param in notYetIndexedParams)
            {
                foreach (var baseResourceType in param.BaseResourceTypes)
                {
                    if (baseResourceType == KnownResourceTypes.Resource)
                    {
                        resourceList.UnionWith(_modelInfoProvider.GetResourceTypeNames().ToHashSet());

                        // We added all possible resource types, so no need to continue
                        break;
                    }

                    if (baseResourceType == KnownResourceTypes.DomainResource)
                    {
                        var domainResourceChildResourceTypes = _modelInfoProvider.GetResourceTypeNames().ToHashSet();

                        // Remove types that inherit from Resource directly
                        domainResourceChildResourceTypes.Remove(KnownResourceTypes.Binary);
                        domainResourceChildResourceTypes.Remove(KnownResourceTypes.Bundle);
                        domainResourceChildResourceTypes.Remove(KnownResourceTypes.Parameters);

                        resourceList.UnionWith(domainResourceChildResourceTypes);
                    }
                    else
                    {
                        resourceList.UnionWith(new[] { baseResourceType });
                    }
                }
            }

            _reindexJobRecord.Resources.AddRange(resourceList);
            _reindexJobRecord.SearchParams.AddRange(notYetIndexedParams.Select(p => p.Url.ToString()));

            await CalculateTotalAndResourceCounts(cancellationToken);

            if (_reindexJobRecord.Count == 0)
            {
                _reindexJobRecord.Error.Add(new OperationOutcomeIssue(
                    OperationOutcomeConstants.IssueSeverity.Information,
                    OperationOutcomeConstants.IssueType.Informational,
                    Resources.NoResourcesNeedToBeReindexed));
                await UpdateParametersAndCompleteJob(cancellationToken);
                return;
            }

            // Generate separate queries for each resource type and add them to query list.
            foreach (string resourceType in _reindexJobRecord.Resources)
            {
                // Checking resource specific counts is a performance improvement,
                // so if an entry for this resource failed to get added to the count dictionary, run a query anyways
                if (!_reindexJobRecord.ResourceCounts.TryGetValue(resourceType, out var resourceCount) || resourceCount > 0)
                {
                    var query = new ReindexJobQueryStatus(resourceType, continuationToken: null)
                    {
                        LastModified = Clock.UtcNow,
                        Status = OperationStatus.Queued,
                    };

                    _reindexJobRecord.QueryList.TryAdd(query, 1);
                }
            }

            await UpdateJobAsync(cancellationToken);
            _throttleController.UpdateDatastoreUsage();
        }

        var queryTasks = new List<Task<ReindexJobQueryStatus>>();
        var queryCancellationTokens = new Dictionary<ReindexJobQueryStatus, CancellationTokenSource>();

        // while not all queries are finished
        while (_reindexJobRecord.QueryList.Keys.Where(q => q.Status == OperationStatus.Queued || q.Status == OperationStatus.Running).Any())
        {
            if (_reindexJobRecord.QueryList.Keys.Where(q => q.Status == OperationStatus.Queued).Any())
            {
                // grab the next query from the list which is labeled as queued and run it
                var query = _reindexJobRecord.QueryList.Keys.Where(q => q.Status == OperationStatus.Queued).OrderBy(q => q.LastModified).FirstOrDefault();
                CancellationTokenSource queryTokensSource = new CancellationTokenSource();
                queryCancellationTokens.TryAdd(query, queryTokensSource);

#pragma warning disable CS4014 // Suppressed as we want to continue execution and begin processing the next query while this continues to run
                queryTasks.Add(ProcessQueryAsync(query, jobSemaphore, queryTokensSource.Token));
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed

                _logger.LogInformation($"Reindex job task created {queryTasks.Count} Tasks");
            }

            // reset stale queries to pending
            var staleQueries = _reindexJobRecord.QueryList.Keys.Where(
                q => q.Status == OperationStatus.Running && q.LastModified < Clock.UtcNow - _reindexJobConfiguration.JobHeartbeatTimeoutThreshold);
            foreach (var staleQuery in staleQueries)
            {
                await jobSemaphore.WaitAsync();
                try
                {
                    // if this query has a created task, cancel it
                    if (queryCancellationTokens.TryGetValue(staleQuery, out var tokenSource))
                    {
                        try
                        {
                            tokenSource.Cancel(false);
                        }
                        catch
                        {
                            // may throw exception if the task is disposed
                        }
                    }

                    staleQuery.Status = OperationStatus.Queued;
                    await UpdateJobAsync(cancellationToken);
                }
                finally
                {
                    jobSemaphore.Release();
                }
            }

            var averageDbConsumption = _throttleController.UpdateDatastoreUsage();

            // BUGFIX: log message typo ("avaerage" -> "average").
            _logger.LogInformation($"Reindex average DB consumption: {averageDbConsumption}");
            var throttleDelayTime = _throttleController.GetThrottleBasedDelay();
            _logger.LogInformation($"Reindex throttle delay: {throttleDelayTime}");
            await Task.Delay(_reindexJobRecord.QueryDelayIntervalInMilliseconds + throttleDelayTime);

            // Remove all finished tasks from the collections of tasks
            // and cancellationTokens
            if (queryTasks.Count >= _reindexJobRecord.MaximumConcurrency)
            {
                // BUGFIX: was Task.WaitAny, which synchronously blocked a thread-pool thread
                // inside an async method; await Task.WhenAny waits without blocking (and, like
                // WaitAny, does not propagate task exceptions).
                await Task.WhenAny(queryTasks);
                var finishedTasks = queryTasks.Where(t => t.IsCompleted).ToArray();
                foreach (var finishedTask in finishedTasks)
                {
                    queryTasks.Remove(finishedTask);
                    queryCancellationTokens.Remove(await finishedTask);
                }
            }

            // if our received CancellationToken is cancelled we should
            // pass that cancellation request onto all the cancellationTokens
            // for the currently executing threads
            if (cancellationToken.IsCancellationRequested)
            {
                foreach (var tokenSource in queryCancellationTokens.Values)
                {
                    tokenSource.Cancel(false);
                }
            }
        }

        // BUGFIX: was Task.WaitAll, a blocking wait inside an async method; awaiting
        // Task.WhenAll surfaces the first faulted task's exception directly (instead of
        // an AggregateException), which the generic catch below records.
        await Task.WhenAll(queryTasks);

        await jobSemaphore.WaitAsync();
        try
        {
            await CheckJobCompletionStatus(cancellationToken);
        }
        finally
        {
            jobSemaphore.Release();
        }
    }
    catch (JobConflictException)
    {
        // The reindex job was updated externally.
        _logger.LogInformation("The job was updated by another process.");
    }
    catch (Exception ex)
    {
        await jobSemaphore.WaitAsync();
        try
        {
            _reindexJobRecord.Error.Add(new OperationOutcomeIssue(
                OperationOutcomeConstants.IssueSeverity.Error,
                OperationOutcomeConstants.IssueType.Exception,
                ex.Message));
            _reindexJobRecord.FailureCount++;

            _logger.LogError(ex, "Encountered an unhandled exception. The job failure count increased to {failureCount}.", _reindexJobRecord.FailureCount);

            await UpdateJobAsync(cancellationToken);

            if (_reindexJobRecord.FailureCount >= _reindexJobConfiguration.ConsecutiveFailuresThreshold)
            {
                await CompleteJobAsync(OperationStatus.Failed, cancellationToken);
            }
            else
            {
                // Requeue so a later attempt can retry the job.
                _reindexJobRecord.Status = OperationStatus.Queued;
                await UpdateJobAsync(cancellationToken);
            }
        }
        finally
        {
            jobSemaphore.Release();
        }
    }
    finally
    {
        jobSemaphore.Dispose();
        _contextAccessor.FhirRequestContext = existingFhirRequestContext;
    }
}
/// <inheritdoc />
public async Task ExecuteAsync(ReindexJobRecord reindexJobRecord, WeakETag weakETag, CancellationToken cancellationToken)
{
    EnsureArg.IsNotNull(reindexJobRecord, nameof(reindexJobRecord));
    EnsureArg.IsNotNull(weakETag, nameof(weakETag));

    // A task instance is single-use; reject a second job on the same instance.
    if (_reindexJobRecord != null)
    {
        throw new NotSupportedException($"{nameof(ReindexJobTask)} can work only on one {nameof(reindexJobRecord)}. Please create new {nameof(ReindexJobTask)} to process this instance of {nameof(reindexJobRecord)}");
    }

    _reindexJobRecord = reindexJobRecord;
    _weakETag = weakETag;
    _jobSemaphore = new SemaphoreSlim(1, 1);
    _cancellationToken = cancellationToken;

    var priorRequestContext = _contextAccessor.RequestContext;

    try
    {
        // Add a request context so Datastore consumption can be added
        var reindexRequestContext = new FhirRequestContext(
            method: OperationsConstants.Reindex,
            uriString: "$reindex",
            baseUriString: "$reindex",
            correlationId: _reindexJobRecord.Id,
            requestHeaders: new Dictionary<string, StringValues>(),
            responseHeaders: new Dictionary<string, StringValues>())
        {
            IsBackgroundTask = true,
            AuditEventType = OperationsConstants.Reindex,
        };

        _contextAccessor.RequestContext = reindexRequestContext;

        // Only query provisioned capacity when a target usage percentage was configured.
        bool throttlingConfigured = reindexJobRecord.TargetDataStoreUsagePercentage != null &&
                                    reindexJobRecord.TargetDataStoreUsagePercentage > 0;

        if (throttlingConfigured)
        {
            using (IScoped<IFhirDataStore> store = _fhirDataStoreFactory.Invoke())
            {
                var capacity = await store.Value.GetProvisionedDataStoreCapacityAsync(_cancellationToken);
                _throttleController.Initialize(_reindexJobRecord, capacity);
            }
        }
        else
        {
            _throttleController.Initialize(_reindexJobRecord, null);
        }

        // If we are resuming a job, we can detect that by checking the progress info from the job record.
        // If no queries have been added to the progress then this is a new job
        if (_reindexJobRecord.QueryList?.Count == 0)
        {
            if (!await TryPopulateNewJobFields())
            {
                return;
            }
        }

        if (_reindexJobRecord.Status != OperationStatus.Running || _reindexJobRecord.StartTime == null)
        {
            // update job record to running
            _reindexJobRecord.Status = OperationStatus.Running;
            _reindexJobRecord.StartTime = Clock.UtcNow;
            await UpdateJobAsync();
        }

        await ProcessJob();

        await _jobSemaphore.WaitAsync(_cancellationToken);
        try
        {
            await CheckJobCompletionStatus();
        }
        finally
        {
            _jobSemaphore.Release();
        }
    }
    catch (JobConflictException)
    {
        // The reindex job was updated externally.
        _logger.LogInformation("The job was updated by another process.");
    }
    catch (OperationCanceledException)
    {
        _logger.LogInformation("The reindex job was canceled.");
    }
    catch (Exception ex)
    {
        await HandleException(ex);
    }
    finally
    {
        _jobSemaphore.Dispose();
        _contextAccessor.RequestContext = priorRequestContext;
    }
}
/// <summary>
/// Executes every routed sub-request of the given HTTP verb and appends a response entry
/// for each one to <paramref name="responseBundle"/>. Each sub-request runs under its own
/// FHIR request context, which is restored afterwards.
/// </summary>
private async Task ExecuteRequests(Hl7.Fhir.Model.Bundle responseBundle, Hl7.Fhir.Model.Bundle.HTTPVerb httpVerb)
{
    foreach (RouteContext request in _requests[httpVerb])
    {
        var entryComponent = new Hl7.Fhir.Model.Bundle.EntryComponent();

        if (request.Handler != null)
        {
            HttpContext httpContext = request.HttpContext;

            // Capture the incoming context so it can be restored once this entry is processed.
            IFhirRequestContext originalFhirRequestContext = _fhirRequestContextAccessor.FhirRequestContext;

            try
            {
                request.RouteData.Values.TryGetValue(KnownActionParameterNames.ResourceType, out object resourceType);
                var newFhirRequestContext = new FhirRequestContext(
                    httpContext.Request.Method,
                    httpContext.Request.GetDisplayUrl(),
                    originalFhirRequestContext.BaseUri.OriginalString,
                    originalFhirRequestContext.CorrelationId,
                    httpContext.Request.Headers,
                    httpContext.Response.Headers,
                    resourceType?.ToString())
                {
                    Principal = originalFhirRequestContext.Principal,
                };

                _fhirRequestContextAccessor.FhirRequestContext = newFhirRequestContext;
                _bundleHttpContextAccessor.HttpContext = httpContext;

                await request.Handler.Invoke(httpContext);

                // The response body was written by the handler; rewind so it can be read back.
                httpContext.Response.Body.Seek(0, SeekOrigin.Begin);
                string bodyContent = new StreamReader(httpContext.Response.Body).ReadToEnd();

                ResponseHeaders responseHeaders = httpContext.Response.GetTypedHeaders();
                entryComponent.Response = new Hl7.Fhir.Model.Bundle.ResponseComponent
                {
                    Status = httpContext.Response.StatusCode.ToString(),
                    Location = responseHeaders.Location?.OriginalString,
                    Etag = responseHeaders.ETag?.ToString(),
                    LastModified = responseHeaders.LastModified,
                };

                if (!string.IsNullOrWhiteSpace(bodyContent))
                {
                    var entryComponentResource = _fhirJsonParser.Parse<Resource>(bodyContent);

                    if (entryComponentResource.ResourceType == ResourceType.OperationOutcome)
                    {
                        entryComponent.Response.Outcome = entryComponentResource;

                        // A transaction is all-or-nothing: a failing entry fails the whole bundle.
                        if (responseBundle.Type == Hl7.Fhir.Model.Bundle.BundleType.TransactionResponse)
                        {
                            ThrowTransactionException(httpContext, (OperationOutcome)entryComponentResource);
                        }
                    }
                    else
                    {
                        entryComponent.Resource = entryComponentResource;
                    }
                }
                else
                {
                    if (httpContext.Response.StatusCode == (int)HttpStatusCode.Forbidden)
                    {
                        entryComponent.Response.Outcome = CreateOperationOutcome(
                            OperationOutcome.IssueSeverity.Error,
                            OperationOutcome.IssueType.Forbidden,
                            Api.Resources.Forbidden);
                    }
                }
            }
            finally
            {
                // BUGFIX: the original captured the incoming request context but never restored it,
                // leaving the accessors pointing at the last sub-request's state after the bundle
                // completed (and on a transaction failure). Restore them unconditionally.
                _fhirRequestContextAccessor.FhirRequestContext = originalFhirRequestContext;
                _bundleHttpContextAccessor.HttpContext = null;
            }
        }
        else
        {
            entryComponent.Response = new Hl7.Fhir.Model.Bundle.ResponseComponent
            {
                Status = ((int)HttpStatusCode.NotFound).ToString(),
                Outcome = CreateOperationOutcome(
                    OperationOutcome.IssueSeverity.Error,
                    OperationOutcome.IssueType.NotFound,
                    string.Format(Api.Resources.BundleNotFound, $"{request.HttpContext.Request.Path}{request.HttpContext.Request.QueryString}")),
            };
        }

        responseBundle.Entry.Add(entryComponent);
    }
}
/// <summary>
/// Runs the import processing task: cleans previously imported data, loads resources from the
/// bulk source, imports them into the data store, and checkpoints progress along the way.
/// Returns a success/canceled result; retriable failures are surfaced as
/// <see cref="RetriableTaskException"/>.
/// </summary>
public async Task<TaskResultData> ExecuteAsync()
{
    // Run under a background-task request context keyed by this task's id.
    var fhirRequestContext = new FhirRequestContext(
        method: "Import",
        uriString: _inputData.UriString,
        baseUriString: _inputData.BaseUriString,
        correlationId: _inputData.TaskId,
        requestHeaders: new Dictionary<string, StringValues>(),
        responseHeaders: new Dictionary<string, StringValues>())
    {
        IsBackgroundTask = true,
    };

    _contextAccessor.RequestContext = fhirRequestContext;

    CancellationToken cancellationToken = _cancellationTokenSource.Token;

    // Counts carried over from a previous (resumed) run; per-run progress is added on top.
    long succeedImportCount = _importProgress.SucceedImportCount;
    long failedImportCount = _importProgress.FailedImportCount;

    ImportProcessingTaskResult result = new ImportProcessingTaskResult();
    result.ResourceType = _inputData.ResourceType;

    try
    {
        if (cancellationToken.IsCancellationRequested)
        {
            throw new OperationCanceledException();
        }

        Func<long, long> sequenceIdGenerator = (index) => _inputData.BeginSequenceId + index;

        // Clean resources before import start
        await _resourceBulkImporter.CleanResourceAsync(_inputData, _importProgress, cancellationToken);
        _importProgress.NeedCleanData = true;
        await _contextUpdater.UpdateContextAsync(JsonConvert.SerializeObject(_importProgress), cancellationToken);

        // Initialize error store
        IImportErrorStore importErrorStore = await _importErrorStoreFactory.InitializeAsync(GetErrorFileName(), cancellationToken);
        result.ErrorLogLocation = importErrorStore.ErrorFileLocation;

        // Load and parse resource from bulk resource
        (Channel<ImportResource> importResourceChannel, Task loadTask) = _importResourceLoader.LoadResources(_inputData.ResourceLocation, _importProgress.CurrentIndex, _inputData.ResourceType, sequenceIdGenerator, cancellationToken);

        // Import to data store
        (Channel<ImportProcessingProgress> progressChannel, Task importTask) = _resourceBulkImporter.Import(importResourceChannel, importErrorStore, cancellationToken);

        // Update progress for checkpoints
        await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync())
        {
            if (cancellationToken.IsCancellationRequested)
            {
                throw new OperationCanceledException("Import task is canceled by user.");
            }

            _importProgress.SucceedImportCount = progress.SucceedImportCount + succeedImportCount;
            _importProgress.FailedImportCount = progress.FailedImportCount + failedImportCount;
            _importProgress.CurrentIndex = progress.CurrentIndex;
            result.SucceedCount = _importProgress.SucceedImportCount;
            result.FailedCount = _importProgress.FailedImportCount;

            _logger.LogInformation("Import task progress: {0}", JsonConvert.SerializeObject(_importProgress));

            try
            {
                await _contextUpdater.UpdateContextAsync(JsonConvert.SerializeObject(_importProgress), cancellationToken);
            }
            catch (Exception ex)
            {
                // ignore exception for progress update
                _logger.LogInformation(ex, "Failed to update context.");
            }
        }

        // Pop up exception during load & import
        // Put import task before load task for resource channel full and blocking issue.
        try
        {
            await importTask;
        }
        catch (TaskCanceledException)
        {
            throw;
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to import data.");
            throw new RetriableTaskException("Failed to import data.", ex);
        }

        try
        {
            await loadTask;
        }
        catch (TaskCanceledException)
        {
            throw;
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to load data.");
            throw new RetriableTaskException("Failed to load data", ex);
        }

        return new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(result));
    }
    catch (TaskCanceledException canceledEx)
    {
        _logger.LogInformation(canceledEx, "Data processing task is canceled.");

        await CleanResourceForFailureAsync(canceledEx);

        return new TaskResultData(TaskResult.Canceled, JsonConvert.SerializeObject(result));
    }
    catch (OperationCanceledException canceledEx)
    {
        _logger.LogInformation(canceledEx, "Data processing task is canceled.");

        await CleanResourceForFailureAsync(canceledEx);

        return new TaskResultData(TaskResult.Canceled, JsonConvert.SerializeObject(result));
    }
    catch (RetriableTaskException retriableEx)
    {
        _logger.LogInformation(retriableEx, "Error in data processing task.");

        await CleanResourceForFailureAsync(retriableEx);

        throw;
    }
    catch (Exception ex)
    {
        _logger.LogInformation(ex, "Critical error in data processing task.");

        await CleanResourceForFailureAsync(ex);

        // BUGFIX: preserve the original exception as the inner exception, consistent with
        // the other RetriableTaskException throws in this method; the original discarded it.
        throw new RetriableTaskException(ex.Message, ex);
    }
    finally
    {
        // Ensure linked work observes cancellation once this task finishes, however it finishes.
        if (!_cancellationTokenSource.IsCancellationRequested)
        {
            _cancellationTokenSource.Cancel();
        }
    }
}