public async Task GivenPerturbAnonymizationConfig_WhenAnonymizeResource_ThenPerturbedNodeShouldBeReturned()
{
    // A perturb rule with span 0 adds no noise, so the value should remain unchanged.
    string configurationContent = @"
{
    ""fhirPathRules"": [
        {
            ""path"": ""Condition.onset as Age"",
            ""method"": ""perturb"",
            ""span"": 0,
            ""roundTo"": ""2""
        }
    ]
}";

    Condition condition = new Condition();
    condition.Onset = new Age { Value = 20 };

    IAnonymizer anonymizer = await CreateAnonymizerFromConfigContent(configurationContent);

    ResourceElement resourceElement = anonymizer.Anonymize(new ResourceElement(condition.ToTypedElement()));
    Condition anonymizedResource = resourceElement.Instance.ToPoco<Condition>();

    Assert.InRange((anonymizedResource.Onset as Age).Value.GetValueOrDefault(), 20, 20);
}
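These anonymization tests call a CreateAnonymizerFromConfigContent helper that is not shown in this listing. Below is a minimal sketch of what such a helper might look like, built only from the pieces that do appear here (the NSubstitute-backed IArtifactProvider and the ExportAnonymizerFactory from the factory test further down). The helper name matches the tests, but its body and the dummy URL are assumptions, not the repository's actual implementation.

private static async Task<IAnonymizer> CreateAnonymizerFromConfigContent(string configuration)
{
    // Hypothetical helper: serve the configuration content through a substituted IArtifactProvider,
    // then let ExportAnonymizerFactory build the IAnonymizer, mirroring the factory test below.
    IArtifactProvider client = Substitute.For<IArtifactProvider>();
    client.FetchAsync(Arg.Any<string>(), Arg.Any<Stream>(), Arg.Any<CancellationToken>()).Returns<Task>(
        x =>
        {
            Stream target = x.ArgAt<Stream>(1);
            byte[] bytes = Encoding.UTF8.GetBytes(configuration);
            target.Write(bytes, 0, bytes.Length);
            return Task.CompletedTask;
        });

    ILogger<ExportJobTask> logger = Substitute.For<ILogger<ExportJobTask>>();
    ExportAnonymizerFactory factory = new ExportAnonymizerFactory(client, logger);

    // "http://dummy" is a placeholder configuration location, as in the factory test below.
    return await factory.CreateAnonymizerAsync("http://dummy", CancellationToken.None);
}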
public AncestryDnaHierarchicalClusteringViewModel(IMatchesLoader matchesLoader, IAnonymizer anonymizer)
{
    _matchesLoader = matchesLoader;
    _anonymizer = anonymizer;

    SelectFileCommand = new RelayCommand(SelectFile);
    SelectCorrelationFileCommand = new RelayCommand(SelectCorrelationFile);
    ProcessSavedDataCommand = new RelayCommand(async () => await ProcessSavedDataAsync());

    // Restore the last-used options from application settings.
    MinClusterSize = Settings.Default.MinClusterSize;
    Filename = Settings.Default.Filename;
    MinCentimorgansInSharedMatches = Settings.Default.MinCentimorgansInSharedMatches;
    MinCentimorgansToCluster = Settings.Default.MinCentimorgansToCluster;
    MaxMatchesPerClusterFile = Settings.Default.MaxMatchesPerClusterFile;
    MaxGrayPercentage = Settings.Default.MaxGrayPercentage;
    FilterToGuids = Settings.Default.FilterToGuids;
    AncestryHostName = Settings.Default.AncestryHostName;
    ExcludeClustersGreaterThan = Settings.Default.ExcludeClustersGreaterThan > 0 ? Settings.Default.ExcludeClustersGreaterThan : (int?)null;
    CorrelationFilename = Settings.Default.CorrelationFilename;
    ShowAdvancedClusteringOptions = Settings.Default.ShowAdvancedClusteringOptions;
    ClusterTypeVeryClose = Settings.Default.ClusterTypeVeryClose;
    ClusterTypeOver20 = Settings.Default.ClusterTypeOver20;
    ClusterTypeComplete = Settings.Default.ClusterTypeComplete;
    OpenClusterFileWhenComplete = Settings.Default.OpenClusterFileWhenComplete;
}
public async Task GivenDateShiftAnonymizationConfig_WhenAnonymizeResource_ThenShiftedNodeShouldBeReturned()
{
    // Date shifting with a fixed dateShiftKey is deterministic, so the shifted date can be asserted exactly.
    string configurationContent = @"
{
    ""fhirPathRules"": [
        {
            ""path"": ""Patient.birthDate"",
            ""method"": ""dateShift""
        }
    ],
    ""parameters"": {
        ""dateShiftKey"": ""123""
    }
}";

    Patient patient = new Patient();
    patient.BirthDate = "2001-01-01";

    IAnonymizer anonymizer = await CreateAnonymizerFromConfigContent(configurationContent);

    ResourceElement resourceElement = anonymizer.Anonymize(new ResourceElement(patient.ToTypedElement()));
    Patient anonymizedResource = resourceElement.Instance.ToPoco<Patient>();

    Assert.Equal("2001-02-20", anonymizedResource.BirthDate);
}
private Match GetAnonymizedMatch(Match match, IAnonymizer anonymizer)
{
    if (anonymizer == null)
    {
        return match;
    }

    // Replace personally identifying fields while preserving the genetic data needed for clustering.
    return new Match
    {
        MatchTestAdminDisplayName = anonymizer.GetAnonymizedName(match.MatchTestAdminDisplayName),
        MatchTestDisplayName = anonymizer.GetAnonymizedName(match.MatchTestDisplayName),
        TestGuid = anonymizer.GetAnonymizedGuid(match.TestGuid),
        SharedCentimorgans = match.SharedCentimorgans,
        SharedSegments = match.SharedSegments,
        LongestBlock = match.LongestBlock,
        TreeType = match.TreeType,
        TreeUrl = match.TreeUrl == null ? null : "https://invalid",
        TreeSize = match.TreeSize,
        HasCommonAncestors = match.HasCommonAncestors,
        CommonAncestors = match.CommonAncestors?.Select(commonAncestor => anonymizer.GetAnonymizedName(commonAncestor)).ToList(),
        Starred = match.Starred,
        HasHint = match.HasHint,
        Note = null,
        TagIds = match.TagIds,
        IsFather = match.IsFather,
        IsMother = match.IsMother,
    };
}
public async Task GivenCryptoHashAnonymizationConfig_WhenAnonymizeResource_ThenHashedNodeShouldBeReturned()
{
    string configurationContent = @"
{
    ""fhirPathRules"": [
        {
            ""path"": ""Resource.id"",
            ""method"": ""cryptoHash""
        }
    ],
    ""parameters"": {
        ""cryptoHashKey"": ""123""
    }
}";

    Patient patient = new Patient();
    patient.Id = "123";

    IAnonymizer anonymizer = await CreateAnonymizerFromConfigContent(configurationContent);

    ResourceElement resourceElement = anonymizer.Anonymize(new ResourceElement(patient.ToTypedElement()));
    Patient anonymizedResource = resourceElement.Instance.ToPoco<Patient>();

    Assert.Equal("3cafe40f92be6ac77d2792b4b267c2da11e3f3087b93bb19c6c5133786984b44", anonymizedResource.Id);
}
public async Task GivenGeneralizeAnonymizationConfig_WhenAnonymizeResource_ThenPropertiesShouldBeGeneralized()
{
    string configurationContent = @"
{
    ""fhirPathRules"": [
        {
            ""path"": ""Patient.birthDate"",
            ""method"": ""generalize"",
            ""cases"": {
                ""$this <= @2020-01-01 and $this >= @1990-01-01"": ""@2000-01-01""
            }
        }
    ]
}";

    Patient patient = new Patient();
    patient.BirthDate = "2001-01-01";

    IAnonymizer anonymizer = await CreateAnonymizerFromConfigContent(configurationContent);

    ResourceElement resourceElement = anonymizer.Anonymize(new ResourceElement(patient.ToTypedElement()));
    Patient anonymizedResource = resourceElement.Instance.ToPoco<Patient>();

    Assert.Equal("2000-01-01", anonymizedResource.BirthDate);
}
private async Task SearchCompartmentWithFilter(
    ExportJobConfiguration exportJobConfiguration,
    ExportJobProgress progress,
    string resourceType,
    List<Tuple<string, string>> queryParametersList,
    IAnonymizer anonymizer,
    string batchIdPrefix,
    CancellationToken cancellationToken)
{
    // The current batch is used to organize a set of search results into a group so that they can be committed together.
    string currentBatchId = batchIdPrefix + "-" + progress.Page.ToString("d6");

    // Process the export if:
    // 1. There is a continuation token, which means there are more resources to be exported.
    // 2. There is no continuation token but the page is 0, which means it's the initial export.
    while (progress.ContinuationToken != null || progress.Page == 0)
    {
        SearchResult searchResult = null;

        // Search and process the results.
        using (IScoped<ISearchService> searchService = _searchServiceFactory())
        {
            searchResult = await searchService.Value.SearchCompartmentAsync(
                compartmentType: KnownResourceTypes.Patient,
                compartmentId: progress.TriggeringResourceId,
                resourceType: resourceType,
                queryParametersList,
                cancellationToken);
        }

        await ProcessSearchResultsAsync(searchResult.Results, currentBatchId, anonymizer, cancellationToken);

        if (searchResult.ContinuationToken == null)
        {
            // No more continuation token, we are done.
            break;
        }

        await ProcessProgressChange(exportJobConfiguration, progress, queryParametersList, searchResult.ContinuationToken, false, cancellationToken);
        currentBatchId = batchIdPrefix + "-" + progress.Page.ToString("d6");
    }

    // Commit one last time for any pending changes.
    await _exportDestinationClient.CommitAsync(exportJobConfiguration, cancellationToken);

    progress.MarkFilterFinished();
    await UpdateJobRecordAsync(cancellationToken);
}
private async Task ProcessFilterForCompartment(
    ExportJobConfiguration exportJobConfiguration,
    ExportJobProgress exportJobProgress,
    List<Tuple<string, string>> queryParametersList,
    IAnonymizer anonymizer,
    string batchIdPrefix,
    CancellationToken cancellationToken)
{
    var index = _exportJobRecord.Filters.IndexOf(exportJobProgress.CurrentFilter);

    List<Tuple<string, string>> filterQueryParametersList = new List<Tuple<string, string>>(queryParametersList);
    foreach (var param in exportJobProgress.CurrentFilter.Parameters)
    {
        filterQueryParametersList.Add(param);
    }

    await SearchCompartmentWithFilter(
        exportJobConfiguration,
        exportJobProgress,
        exportJobProgress.CurrentFilter.ResourceType,
        filterQueryParametersList,
        anonymizer,
        batchIdPrefix + index,
        cancellationToken);
}
public async Task GivenAValidAnonymizationConfiguration_WhenCreatingAnonymizer_AnonymizerShouldBeCreated()
{
    // Substitute an artifact provider that writes the sample configuration into the destination stream.
    IArtifactProvider client = Substitute.For<IArtifactProvider>();
    client.FetchAsync(Arg.Any<string>(), Arg.Any<Stream>(), Arg.Any<CancellationToken>()).Returns<Task>(
        x =>
        {
            Stream target = x.ArgAt<Stream>(1);
            target.Write(Encoding.UTF8.GetBytes(SampleConfiguration), 0, SampleConfiguration.Length);
            return Task.CompletedTask;
        });

    ILogger<ExportJobTask> logger = Substitute.For<ILogger<ExportJobTask>>();

    ExportAnonymizerFactory factory = new ExportAnonymizerFactory(client, logger);

    IAnonymizer anonymizer = await factory.CreateAnonymizerAsync("http://dummy", CancellationToken.None);
    Assert.NotNull(anonymizer);
}
public async Task GivenRedactAnonymizationConfig_WhenAnonymizeResource_ThenPropertiesShouldBeRedacted()
{
    string configurationContent = @"
{
    ""fhirPathRules"": [
        {
            ""path"": ""Patient.name"",
            ""method"": ""redact""
        }
    ]
}";

    Patient patient = new Patient();
    patient.Name.Add(HumanName.ForFamily("Test"));

    IAnonymizer anonymizer = await CreateAnonymizerFromConfigContent(configurationContent);

    ResourceElement resourceElement = anonymizer.Anonymize(new ResourceElement(patient.ToTypedElement()));
    Patient anonymizedResource = resourceElement.Instance.ToPoco<Patient>();

    Assert.Empty(anonymizedResource.Name);
}
public async Task GivenSubstituteAnonymizationConfig_WhenAnonymizeResource_ThenSubstitutedNodeShouldBeReturned()
{
    string configurationContent = @"
{
    ""fhirPathRules"": [
        {
            ""path"": ""Patient.name.family"",
            ""method"": ""substitute"",
            ""replaceWith"": ""test""
        }
    ]
}";

    Patient patient = new Patient();
    patient.Name.Add(HumanName.ForFamily("input"));

    IAnonymizer anonymizer = await CreateAnonymizerFromConfigContent(configurationContent);

    ResourceElement resourceElement = anonymizer.Anonymize(new ResourceElement(patient.ToTypedElement()));
    Patient anonymizedResource = resourceElement.Instance.ToPoco<Patient>();

    Assert.Equal("test", anonymizedResource.Name.First().Family);
}
public async Task<(string, List<IClusterableMatch>, List<Tag>)> LoadClusterableMatchesAsync(string savedData, double minCentimorgansToCluster, double minCentimorgansInSharedMatches, IAnonymizer anonymizer, ProgressData progressData)
{
    progressData.Description = "Loading data...";

    var serializedMatchesReaders = _serializedMatchesReaders.Where(reader => reader.IsSupportedFileType(savedData)).ToList();
    if (serializedMatchesReaders.Count == 0)
    {
        MessageBox.Show("Unsupported file type.");
        return (null, null, null);
    }

    // Try each reader that claims to support the file until one succeeds.
    Serialized input = null;
    string errorMessage = null;
    foreach (var serializedMatchesReader in serializedMatchesReaders)
    {
        string thisErrorMessage;
        (input, thisErrorMessage) = await serializedMatchesReader.ReadFileAsync(savedData, progressData);
        if (input != null)
        {
            break;
        }

        if (errorMessage == null)
        {
            errorMessage = thisErrorMessage;
        }
    }

    if (input == null)
    {
        MessageBox.Show(errorMessage);
        return (null, null, null);
    }

    return await Task.Run(() =>
    {
        // Only matches at or above the clustering threshold are clustered; shared-match lists are limited to the stronger matches.
        var strongMatches = input.Matches.Where(match => match.SharedCentimorgans >= minCentimorgansToCluster).ToList();
        var maxMatchIndex = strongMatches.Count + 1;
        var maxIcwIndex = Math.Min(maxMatchIndex, input.Matches.Count(match => match.SharedCentimorgans >= minCentimorgansInSharedMatches) + 1);
        maxIcwIndex = Math.Min(maxIcwIndex, input.Matches.Count - 1);
        var strongMatchesGuids = new HashSet<string>(strongMatches.Select(match => match.TestGuid), StringComparer.OrdinalIgnoreCase);
        var icw = input.Icw
            .Where(kvp => strongMatchesGuids.Contains(kvp.Key))
            .OrderBy(kvp => input.MatchIndexes.TryGetValue(kvp.Key, out var index) ? index : input.MatchIndexes.Count)
            .ToDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.Where(index => index <= maxIcwIndex).ToList());
        var matchesDictionary = strongMatches.ToDictionary(match => match.TestGuid);
        var clusterableMatches = icw
            .AsParallel().AsOrdered()
            .Select((kvp, index) =>
            {
                var match = matchesDictionary[kvp.Key];
                match = GetAnonymizedMatch(match, anonymizer);
                return (IClusterableMatch)new ClusterableMatch(index, match, kvp.Value);
            })
            .ToList();

        clusterableMatches = MaybeFilterMassivelySharedMatches(clusterableMatches);

        // When anonymizing, also anonymize the test taker's GUID and replace tag labels with generic group names.
        var testTakerTestId = anonymizer?.GetAnonymizedGuid(input.TestTakerTestId) ?? input.TestTakerTestId;
        var tags = anonymizer == null ? input.Tags : input.Tags?.Select((tag, index) => new Tag { TagId = tag.TagId, Color = tag.Color, Label = $"Group{index}" }).ToList();

        return (testTakerTestId, clusterableMatches, tags);
    });
}
private async Task ProcessSearchResultsAsync(IEnumerable<SearchResultEntry> searchResults, string partId, IAnonymizer anonymizer, CancellationToken cancellationToken)
{
    foreach (SearchResultEntry result in searchResults)
    {
        ResourceWrapper resourceWrapper = result.Resource;

        string resourceType = resourceWrapper.ResourceTypeName;

        // Check whether we already have an existing file for the current resource type.
        if (!_resourceTypeToFileInfoMapping.TryGetValue(resourceType, out ExportFileInfo exportFileInfo))
        {
            // Check whether we have seen this file previously (in situations where we are resuming an export).
            if (_exportJobRecord.Output.TryGetValue(resourceType, out exportFileInfo))
            {
                // A file already exists for this resource type. Let us open the file on the client.
                await _exportDestinationClient.OpenFileAsync(exportFileInfo.FileUri, cancellationToken);
            }
            else
            {
                // File does not exist. Create it.
                string fileName = resourceType + ".ndjson";
                Uri fileUri = await _exportDestinationClient.CreateFileAsync(fileName, cancellationToken);

                exportFileInfo = new ExportFileInfo(resourceType, fileUri, sequence: 0);

                // Since we created a new file, the JobRecord Output also needs to know about it.
                _exportJobRecord.Output.TryAdd(resourceType, exportFileInfo);
            }

            _resourceTypeToFileInfoMapping.Add(resourceType, exportFileInfo);
        }

        ResourceElement element = _resourceDeserializer.Deserialize(resourceWrapper);

        if (anonymizer != null)
        {
            element = anonymizer.Anonymize(element);
        }

        // Serialize into NDJson and write to the file.
        byte[] bytesToWrite = _resourceToByteArraySerializer.Serialize(element);
        await _exportDestinationClient.WriteFilePartAsync(exportFileInfo.FileUri, partId, bytesToWrite, cancellationToken);

        // Increment the file information.
        exportFileInfo.IncrementCount(bytesToWrite.Length);
    }
}
private async Task RunExportSearch(
    ExportJobConfiguration exportJobConfiguration,
    ExportJobProgress progress,
    List<Tuple<string, string>> sharedQueryParametersList,
    CancellationToken cancellationToken)
{
    EnsureArg.IsNotNull(exportJobConfiguration, nameof(exportJobConfiguration));
    EnsureArg.IsNotNull(progress, nameof(progress));
    EnsureArg.IsNotNull(sharedQueryParametersList, nameof(sharedQueryParametersList));

    // The current batch is used to organize a set of search results into a group so that they can be committed together.
    string currentBatchId = progress.Page.ToString("d6");

    List<Tuple<string, string>> queryParametersList = new List<Tuple<string, string>>(sharedQueryParametersList);
    if (progress.ContinuationToken != null)
    {
        queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.ContinuationToken, progress.ContinuationToken));
    }

    if (_exportJobRecord.ExportType == ExportJobType.Patient)
    {
        queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.Type, KnownResourceTypes.Patient));
    }
    else if (_exportJobRecord.ExportType == ExportJobType.All && !string.IsNullOrEmpty(_exportJobRecord.ResourceType))
    {
        queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.Type, _exportJobRecord.ResourceType));
    }

    IAnonymizer anonymizer = IsAnonymizedExportJob() ? await CreateAnonymizerAsync(cancellationToken) : null;

    // Process the export if:
    // 1. There is a continuation token, which means there are more resources to be exported.
    // 2. There is no continuation token but the page is 0, which means it's the initial export.
    while (progress.ContinuationToken != null || progress.Page == 0)
    {
        SearchResult searchResult = null;

        // Search and process the results.
        switch (_exportJobRecord.ExportType)
        {
            case ExportJobType.All:
            case ExportJobType.Patient:
                using (IScoped<ISearchService> searchService = _searchServiceFactory())
                {
                    searchResult = await searchService.Value.SearchAsync(
                        resourceType: null,
                        queryParametersList,
                        cancellationToken);
                }

                break;
            case ExportJobType.Group:
                searchResult = await GetGroupPatients(
                    _exportJobRecord.GroupId,
                    queryParametersList,
                    _exportJobRecord.QueuedTime,
                    cancellationToken);
                break;
        }

        if (_exportJobRecord.ExportType == ExportJobType.Patient || _exportJobRecord.ExportType == ExportJobType.Group)
        {
            uint resultIndex = 0;

            foreach (SearchResultEntry result in searchResult.Results)
            {
                // If a job is resumed in the middle of processing patient compartment resources, it will skip patients it has already exported compartment information for.
                // This assumes the order of the search results is the same every time the same search is performed.
                if (progress.SubSearch != null && result.Resource.ResourceId != progress.SubSearch.TriggeringResourceId)
                {
                    resultIndex++;
                    continue;
                }

                if (progress.SubSearch == null)
                {
                    progress.NewSubSearch(result.Resource.ResourceId);
                }

                await RunExportCompartmentSearch(exportJobConfiguration, progress.SubSearch, sharedQueryParametersList, cancellationToken, currentBatchId + ":" + resultIndex.ToString("d6"));
                resultIndex++;

                progress.ClearSubSearch();
            }
        }

        await ProcessSearchResultsAsync(searchResult.Results, currentBatchId, anonymizer, cancellationToken);

        if (searchResult.ContinuationToken == null)
        {
            // No more continuation token, we are done.
            break;
        }

        await ProcessProgressChange(
            exportJobConfiguration,
            progress,
            queryParametersList,
            searchResult.ContinuationToken,
            forceCommit: _exportJobRecord.ExportType == ExportJobType.Patient || _exportJobRecord.ExportType == ExportJobType.Group,
            cancellationToken);
        currentBatchId = progress.Page.ToString("d6");
    }

    // Commit one last time for any pending changes.
    await _exportDestinationClient.CommitAsync(exportJobConfiguration, cancellationToken);
}
private async Task ProcessSearchResultsAsync(IEnumerable<SearchResultEntry> searchResults, string partId, IAnonymizer anonymizer, CancellationToken cancellationToken)
{
    foreach (SearchResultEntry result in searchResults)
    {
        ResourceWrapper resourceWrapper = result.Resource;

        ResourceElement element = _resourceDeserializer.Deserialize(resourceWrapper);

        if (anonymizer != null)
        {
            element = anonymizer.Anonymize(element);
        }

        // Serialize into NDJson and write to the file.
        byte[] bytesToWrite = _resourceToByteArraySerializer.Serialize(element);
        await _fileManager.WriteToFile(resourceWrapper.ResourceTypeName, partId, bytesToWrite, cancellationToken);
    }
}
private async Task RunExportCompartmentSearch(
    ExportJobConfiguration exportJobConfiguration,
    ExportJobProgress progress,
    List<Tuple<string, string>> sharedQueryParametersList,
    IAnonymizer anonymizer,
    CancellationToken cancellationToken,
    string batchIdPrefix = "")
{
    EnsureArg.IsNotNull(exportJobConfiguration, nameof(exportJobConfiguration));
    EnsureArg.IsNotNull(progress, nameof(progress));
    EnsureArg.IsNotNull(sharedQueryParametersList, nameof(sharedQueryParametersList));

    List<Tuple<string, string>> queryParametersList = new List<Tuple<string, string>>(sharedQueryParametersList);
    if (progress.ContinuationToken != null)
    {
        queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.ContinuationToken, progress.ContinuationToken));
    }

    var requestedResourceTypes = _exportJobRecord.ResourceType?.Split(',');
    var filteredResources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    if (_exportJobRecord.Filters != null)
    {
        foreach (var filter in _exportJobRecord.Filters)
        {
            filteredResources.Add(filter.ResourceType);
        }
    }

    // Resume any filter that was in progress when the job was interrupted.
    if (progress.CurrentFilter != null)
    {
        await ProcessFilterForCompartment(exportJobConfiguration, progress, queryParametersList, anonymizer, batchIdPrefix + "-filter", cancellationToken);
    }

    // Run each remaining filter whose resource type was requested.
    if (_exportJobRecord.Filters != null)
    {
        foreach (var filter in _exportJobRecord.Filters)
        {
            if (!progress.CompletedFilters.Contains(filter) &&
                requestedResourceTypes != null &&
                requestedResourceTypes.Contains(filter.ResourceType, StringComparison.OrdinalIgnoreCase))
            {
                progress.SetFilter(filter);
                await ProcessFilterForCompartment(exportJobConfiguration, progress, queryParametersList, anonymizer, batchIdPrefix + "-filter", cancellationToken);
            }
        }
    }

    // Run an unfiltered compartment search for any requested resource types that have no filter.
    if (_exportJobRecord.Filters == null ||
        _exportJobRecord.Filters.Count == 0 ||
        !requestedResourceTypes.All(resourceType => filteredResources.Contains(resourceType)))
    {
        if (requestedResourceTypes != null)
        {
            List<string> resources = new List<string>();

            foreach (var resource in requestedResourceTypes)
            {
                if (!filteredResources.Contains(resource))
                {
                    resources.Add(resource);
                }
            }

            if (resources.Count > 0)
            {
                queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.Type, resources.JoinByOrSeparator()));
            }
        }

        await SearchCompartmentWithFilter(exportJobConfiguration, progress, null, queryParametersList, anonymizer, batchIdPrefix, cancellationToken);
    }
}
private async Task SearchWithFilter(
    ExportJobConfiguration exportJobConfiguration,
    ExportJobProgress progress,
    string resourceType,
    List<Tuple<string, string>> queryParametersList,
    List<Tuple<string, string>> sharedQueryParametersList,
    IAnonymizer anonymizer,
    string batchIdPrefix,
    CancellationToken cancellationToken)
{
    // The current batch is used to organize a set of search results into a group so that they can be committed together.
    string currentBatchId = batchIdPrefix + progress.Page.ToString("d6");

    // Process the export if:
    // 1. There is a continuation token, which means there are more resources to be exported.
    // 2. There is no continuation token but the page is 0, which means it's the initial export.
    while (progress.ContinuationToken != null || progress.Page == 0)
    {
        SearchResult searchResult = null;

        // Search and process the results.
        switch (_exportJobRecord.ExportType)
        {
            case ExportJobType.All:
            case ExportJobType.Patient:
                using (IScoped<ISearchService> searchService = _searchServiceFactory())
                {
                    searchResult = await searchService.Value.SearchAsync(
                        resourceType: resourceType,
                        queryParametersList,
                        cancellationToken);
                }

                break;
            case ExportJobType.Group:
                searchResult = await GetGroupPatients(
                    _exportJobRecord.GroupId,
                    queryParametersList,
                    _exportJobRecord.QueuedTime,
                    cancellationToken);
                break;
        }

        if (_exportJobRecord.ExportType == ExportJobType.Patient || _exportJobRecord.ExportType == ExportJobType.Group)
        {
            uint resultIndex = 0;

            foreach (SearchResultEntry result in searchResult.Results)
            {
                // If a job is resumed in the middle of processing patient compartment resources, it will skip patients it has already exported compartment information for.
                // This assumes the order of the search results is the same every time the same search is performed.
                if (progress.SubSearch != null && result.Resource.ResourceId != progress.SubSearch.TriggeringResourceId)
                {
                    resultIndex++;
                    continue;
                }

                if (progress.SubSearch == null)
                {
                    progress.NewSubSearch(result.Resource.ResourceId);
                }

                await RunExportCompartmentSearch(exportJobConfiguration, progress.SubSearch, sharedQueryParametersList, anonymizer, cancellationToken, currentBatchId + ":" + resultIndex.ToString("d6"));
                resultIndex++;

                progress.ClearSubSearch();
            }
        }

        // Skip processing top-level search results if the job only requested resources from the compartments of patients, but didn't want the patients themselves.
        if (_exportJobRecord.ExportType == ExportJobType.All ||
            string.IsNullOrWhiteSpace(_exportJobRecord.ResourceType) ||
            _exportJobRecord.ResourceType.Contains(KnownResourceTypes.Patient, StringComparison.OrdinalIgnoreCase))
        {
            await ProcessSearchResultsAsync(searchResult.Results, currentBatchId, anonymizer, cancellationToken);
        }

        if (searchResult.ContinuationToken == null)
        {
            // No more continuation token, we are done.
            break;
        }

        await ProcessProgressChange(
            exportJobConfiguration,
            progress,
            queryParametersList,
            searchResult.ContinuationToken,
            forceCommit: _exportJobRecord.ExportType == ExportJobType.Patient || _exportJobRecord.ExportType == ExportJobType.Group,
            cancellationToken);
        currentBatchId = batchIdPrefix + progress.Page.ToString("d6");
    }

    // Commit one last time for any pending changes.
    await _exportDestinationClient.CommitAsync(exportJobConfiguration, cancellationToken);
}
private async Task RunExportSearch(
    ExportJobConfiguration exportJobConfiguration,
    ExportJobProgress progress,
    List<Tuple<string, string>> sharedQueryParametersList,
    CancellationToken cancellationToken)
{
    EnsureArg.IsNotNull(exportJobConfiguration, nameof(exportJobConfiguration));
    EnsureArg.IsNotNull(progress, nameof(progress));
    EnsureArg.IsNotNull(sharedQueryParametersList, nameof(sharedQueryParametersList));

    List<Tuple<string, string>> queryParametersList = new List<Tuple<string, string>>(sharedQueryParametersList);
    if (progress.ContinuationToken != null)
    {
        queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.ContinuationToken, progress.ContinuationToken));
    }

    var requestedResourceTypes = _exportJobRecord.ResourceType?.Split(',');
    var filteredResources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    if (_exportJobRecord.Filters != null)
    {
        foreach (var filter in _exportJobRecord.Filters)
        {
            filteredResources.Add(filter.ResourceType);
        }
    }

    IAnonymizer anonymizer = IsAnonymizedExportJob() ? await CreateAnonymizerAsync(cancellationToken) : null;

    if (progress.CurrentFilter != null)
    {
        await ProcessFilter(exportJobConfiguration, progress, queryParametersList, sharedQueryParametersList, anonymizer, "filter", cancellationToken);
    }

    if (_exportJobRecord.Filters != null &&
        _exportJobRecord.Filters.Any(filter => !progress.CompletedFilters.Contains(filter)))
    {
        foreach (var filter in _exportJobRecord.Filters)
        {
            if (!progress.CompletedFilters.Contains(filter) &&
                requestedResourceTypes != null &&
                requestedResourceTypes.Contains(filter.ResourceType, StringComparison.OrdinalIgnoreCase) &&
                (_exportJobRecord.ExportType == ExportJobType.All || filter.ResourceType.Equals(KnownResourceTypes.Patient, StringComparison.OrdinalIgnoreCase)))
            {
                progress.SetFilter(filter);
                await ProcessFilter(exportJobConfiguration, progress, queryParametersList, sharedQueryParametersList, anonymizer, "filter", cancellationToken);
            }
        }
    }

    // The unfiltered search should be run if no filters were specified, if there were requested types that had no filters, or if a Patient/Group level export had no filter for Patients.
    // Examples:
    // If a patient/group export job with type and type filters is run, but patients aren't in the types requested, the search should be run here but no patients printed to the output.
    // If a patient/group export job with type and type filters is run, and patients are in the types requested and filtered, the search should not be run, as patients were searched above.
    // If an export job with type and type filters is run, the search should not be run if all the types were searched above.
    if (_exportJobRecord.Filters == null ||
        _exportJobRecord.Filters.Count == 0 ||
        (_exportJobRecord.ExportType == ExportJobType.All && !requestedResourceTypes.All(resourceType => filteredResources.Contains(resourceType))) ||
        ((_exportJobRecord.ExportType == ExportJobType.Patient || _exportJobRecord.ExportType == ExportJobType.Group) && !filteredResources.Contains(KnownResourceTypes.Patient)))
    {
        if (_exportJobRecord.ExportType == ExportJobType.Patient)
        {
            queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.Type, KnownResourceTypes.Patient));
        }
        else if (_exportJobRecord.ExportType == ExportJobType.All && requestedResourceTypes != null)
        {
            List<string> resources = new List<string>();

            foreach (var resource in requestedResourceTypes)
            {
                if (!filteredResources.Contains(resource))
                {
                    resources.Add(resource);
                }
            }

            if (resources.Count > 0)
            {
                queryParametersList.Add(Tuple.Create(KnownQueryParameterNames.Type, resources.JoinByOrSeparator()));
            }
        }

        await SearchWithFilter(exportJobConfiguration, progress, null, queryParametersList, sharedQueryParametersList, anonymizer, string.Empty, cancellationToken);
    }
}
public AnonymizeController(ILogger<AnonymizeController> logger, IAnonymizer anonymizer)
{
    _logger = logger;
    _anonymizer = anonymizer;
}
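Only the constructor of this controller is shown. For context, here is a minimal, hypothetical action method illustrating how the injected _anonymizer might be consumed; the AnonymizeName endpoint, its route, and the GetAnonymizedName call are illustrative assumptions (borrowed from the clustering examples above), not part of the original controller.

[HttpPost]
public IActionResult AnonymizeName([FromBody] string name)
{
    // Hypothetical endpoint: delegate to the injected anonymizer and return the result.
    // Assumes an IAnonymizer exposing GetAnonymizedName, as in the clustering code above.
    _logger.LogInformation("Anonymizing a display name.");
    return Ok(_anonymizer.GetAnonymizedName(name));
}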