protected override void OnExecute(CommandProcessor theProcessor)
{
    Platform.CheckForNullReference(Context, "Context");
    Platform.CheckForNullReference(Context.ReconcileWorkQueueData, "Context.ReconcileWorkQueueData");

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        string imagePath = GetReconcileUidPath(uid);
        try
        {
            using (var processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid)))
            {
                var deleteFile = new FileDeleteCommand(imagePath, true);
                var deleteUid = new DeleteWorkQueueUidCommand(uid);
                processor.AddCommand(deleteFile);
                processor.AddCommand(deleteUid);

                Platform.Log(ServerPlatform.InstanceLogLevel, deleteFile.ToString());

                if (!processor.Execute())
                {
                    throw new Exception(String.Format("Unable to discard image {0}", uid.SopInstanceUid));
                }
            }
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Unexpected exception discarding file: {0}", imagePath);
            SopInstanceProcessor.FailUid(uid, true);
        }
    }
}
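The discard path above leans on ServerCommandProcessor to keep the filesystem and the database in step: the file delete and the WorkQueueUid delete either both apply or both roll back. Below is a rough, self-contained sketch of that aggregate-command pattern; the ICommand and SimpleCommandProcessor names are hypothetical illustrations, not the ClearCanvas types.

// Minimal sketch of an aggregate-command processor: commands run in order,
// and on the first failure the already-executed commands are undone in
// reverse. Hypothetical names; assumed semantics, not the real API.
using System;
using System.Collections.Generic;

interface ICommand
{
    void Execute();
    void Undo();
}

class SimpleCommandProcessor
{
    private readonly List<ICommand> _commands = new List<ICommand>();
    private readonly Stack<ICommand> _executed = new Stack<ICommand>();

    public void AddCommand(ICommand command)
    {
        _commands.Add(command);
    }

    // Returns false (after rolling back) if any command throws.
    public bool Execute()
    {
        foreach (ICommand command in _commands)
        {
            try
            {
                command.Execute();
                _executed.Push(command);
            }
            catch (Exception)
            {
                // Roll back everything that already ran, newest first.
                while (_executed.Count > 0)
                    _executed.Pop().Undo();
                return false;
            }
        }
        return true;
    }
}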
private void AddDuplicateToStudy(DicomFile duplicateDicomFile, WorkQueueUid uid, ProcessDuplicateAction action)
{
    var context = new StudyProcessorContext(StorageLocation, WorkQueueItem);
    var sopInstanceProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };
    string group = uid.GroupID ?? ServerHelper.GetUidGroup(duplicateDicomFile, ServerPartition, WorkQueueItem.InsertTime);

    StudyXml studyXml = StorageLocation.LoadStudyXml();
    int originalInstanceCount = studyXml.NumberOfStudyRelatedInstances;

    bool compare = action != ProcessDuplicateAction.OverwriteAsIs;
    // NOTE: "compare" has no effect for OverwriteUseExisting or OverwriteUseDuplicate
    // because in both cases, the study and the duplicates are modified to be the same.
    ProcessingResult result = sopInstanceProcessor.ProcessFile(group, duplicateDicomFile, studyXml, compare, true, uid, duplicateDicomFile.Filename, SopInstanceProcessorSopType.UpdatedSop);
    if (result.Status == ProcessingStatus.Reconciled)
    {
        throw new ApplicationException("Unexpected status of Reconciled image in duplicate handling!");
    }

    Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCount + 1);
    Debug.Assert(File.Exists(StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid)));
}
/// <summary>
/// Process a specific DICOM file related to a <see cref="WorkQueue"/> request.
/// </summary>
/// <param name="queueUid">The <see cref="WorkQueueUid"/> entry for the file being processed.</param>
/// <param name="stream">The <see cref="StudyXml"/> file to update with information from the file.</param>
/// <param name="file">The file being processed.</param>
/// <param name="compare">Indicates whether to compare the DICOM file against the study in the system.</param>
protected virtual void ProcessFile(WorkQueueUid queueUid, DicomFile file, StudyXml stream, bool compare)
{
    var processor = new SopInstanceProcessor(Context) { EnforceNameRules = true };

    var fileInfo = new FileInfo(file.Filename);
    long fileSize = fileInfo.Length;

    processor.InstanceStats.FileLoadTime.Start();
    processor.InstanceStats.FileLoadTime.End();
    processor.InstanceStats.FileSize = (ulong)fileSize;
    string sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, "File:" + fileInfo.Name);
    processor.InstanceStats.Description = sopInstanceUid;

    string group = queueUid.GroupID ?? ServerHelper.GetUidGroup(file, ServerPartition, WorkQueueItem.InsertTime);

    ProcessingResult result = processor.ProcessFile(group, file, stream, compare, true, queueUid, null);
    if (result.Status == ProcessingStatus.Reconciled)
    {
        // The file has been saved by the SopInstanceProcessor in another place for reconciliation.
        // Note: the SopInstanceProcessor has removed the WorkQueueUid, so we
        // only need to delete the file here.
        FileUtils.Delete(fileInfo.FullName);
    }

    Statistics.StudyInstanceUid = StorageLocation.StudyInstanceUid;
    if (String.IsNullOrEmpty(processor.Modality) == false)
    {
        Statistics.Modality = processor.Modality;
    }

    // Update the statistics
    Statistics.NumInstances++;
    Statistics.AddSubStats(processor.InstanceStats);
}
protected override void ProcessFile(Model.WorkQueueUid queueUid, DicomFile file, ClearCanvas.Dicom.Utilities.Xml.StudyXml stream, bool compare)
{
    Platform.CheckFalse(compare, "compare");

    SopInstanceProcessor processor = new SopInstanceProcessor(Context);

    FileInfo fileInfo = new FileInfo(file.Filename);
    long fileSize = fileInfo.Length;
    processor.InstanceStats.FileSize = (ulong)fileSize;
    string sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, "File:" + fileInfo.Name);
    processor.InstanceStats.Description = sopInstanceUid;

    if (Study != null)
    {
        StudyComparer comparer = new StudyComparer();
        DifferenceCollection list = comparer.Compare(file, Study, ServerPartition.GetComparisonOptions());
        if (list != null && list.Count > 0)
        {
            Platform.Log(LogLevel.Warn, "Dicom file contains information inconsistent with the study in the system");
        }
    }

    string groupID = ServerHelper.GetUidGroup(file, StorageLocation.ServerPartition, WorkQueueItem.InsertTime);
    processor.ProcessFile(groupID, file, stream, false, false, null, null);

    Statistics.StudyInstanceUid = StorageLocation.StudyInstanceUid;
    if (String.IsNullOrEmpty(processor.Modality) == false)
    {
        Statistics.Modality = processor.Modality;
    }

    // Update the statistics
    Statistics.NumInstances++;
}
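This override only warns on mismatches; the DifferenceCollection returned by StudyComparer presumably enumerates each attribute whose value in the file disagrees with the Study record. Here is a minimal, self-contained sketch of that collect-all-differences style of comparison; the Difference shape and DemoComparer are assumptions for illustration, not the real StudyComparer API.

// Sketch: compare expected vs. actual attribute values and collect every
// mismatch instead of failing on the first one. Hypothetical types.
using System;
using System.Collections.Generic;

class Difference
{
    public string Name, Expected, Actual;

    public override string ToString()
    {
        return String.Format("{0}: expected '{1}', found '{2}'", Name, Expected, Actual);
    }
}

static class DemoComparer
{
    public static List<Difference> Compare(IDictionary<string, string> expected,
                                           IDictionary<string, string> actual)
    {
        var differences = new List<Difference>();
        foreach (KeyValuePair<string, string> pair in expected)
        {
            string value;
            actual.TryGetValue(pair.Key, out value);
            if (!string.Equals(pair.Value, value, StringComparison.Ordinal))
                differences.Add(new Difference { Name = pair.Key, Expected = pair.Value, Actual = value });
        }
        return differences;
    }
}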
private void ProcessUidList()
{
    string lastErrorMessage = "";

    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Add the update commands to update the files
    context.UpdateCommands.AddRange(BuildUpdateCommandList());

    // Add command to update the Series & Sop Instances.
    context.UpdateCommands.Add(new SeriesSopUpdateCommand(Context.WorkQueueItemStudyStorage, _destinationStudyStorage, UidMapper));

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    PrintUpdateCommands(context.UpdateCommands);

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        // Load the file outside the try/catch block so it can be
        // referenced in the catch block
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);

        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, Context.Partition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };
            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid), SopInstanceProcessorSopType.NewSop);
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            _processedCount++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, _processedCount, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            if (e is InstanceAlreadyExistsException
                || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                // TODO (Rigel) - Check if we should include the WorkItemData to insert into the WorkQueue here.
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid), Context.WorkQueueItem, uid, null);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
            _failedCount++;
        }
    }

    if (_processedCount == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
private void ProcessUidList()
{
    int counter = 0;
    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    string lastErrorMessage = "";

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);
        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, _destinationStudyStorage.ServerPartition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context);
            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid));
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            counter++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, counter, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            if (e is InstanceAlreadyExistsException
                || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid), Context.WorkQueueItem, uid);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
        }
    }

    if (counter == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
private void ProcessUidList()
{
    string lastErrorMessage = "";

    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Add the update commands to update the files
    context.UpdateCommands.AddRange(BuildUpdateCommandList());

    // Add command to update the Series & Sop Instances.
    context.UpdateCommands.Add(new SeriesSopUpdateCommand(Context.WorkQueueItemStudyStorage, _destinationStudyStorage, UidMapper));

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    PrintUpdateCommands(context.UpdateCommands);

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        // Load the file outside the try/catch block so it can be
        // referenced in the catch block
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);

        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, Context.Partition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };
            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid));
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            _processedCount++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, _processedCount, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            if (e is InstanceAlreadyExistsException
                || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid), Context.WorkQueueItem, uid);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
            _failedCount++;
        }
    }

    if (_processedCount == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
private void ProcessUidList()
{
    int counter = 0;
    Platform.Log(LogLevel.Info, "Populating images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage)
    {
        SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey)
    };

    // Load the rules engine
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Add the update commands to update the files. Note that the new Study Instance Uid is already part of this update.
    context.UpdateCommands.AddRange(Commands);

    // Add command to update the Series & Sop Instances.
    context.UpdateCommands.Add(new SeriesSopUpdateCommand(Context.WorkQueueItemStudyStorage, _destinationStudyStorage, UidMapper));

    // Create/Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    string lastErrorMessage = "";

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        string imagePath = GetReconcileUidPath(uid);
        var file = new DicomFile(imagePath);
        var sopProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };

        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, _destinationStudyStorage.ServerPartition, Context.WorkQueueItem.InsertTime);

            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, imagePath);
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            counter++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled and Processed SOP {0} [{1} of {2}]", uid.SopInstanceUid, counter, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);
            lastErrorMessage = e.Message;
            SopInstanceProcessor.FailUid(uid, true);
        }
    }

    if (counter == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
private void ProcessUidList()
{
    int counter = 0;
    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    string lastErrorMessage = "";

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);
        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, _destinationStudyStorage.ServerPartition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context);
            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid), SopInstanceProcessorSopType.NewSop);
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            counter++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, counter, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            if (e is InstanceAlreadyExistsException
                || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                // TODO (Rigel) - Check if we should include the WorkQueueData field here
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid), Context.WorkQueueItem, uid, null);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
        }
    }

    if (counter == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
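All of the ProcessUidList variants above share one control-flow skeleton: attempt each uid, fail individual uids for retry, remember only the last error message, and fail the whole work item only when nothing at all succeeded. The generic, self-contained sketch below captures that batch policy; processOne and failOne are hypothetical placeholders for the per-uid work, not ClearCanvas methods.

// Sketch: per-item failures are tolerated; the batch fails only when
// every item failed. Mirrors the counter / lastErrorMessage pattern.
using System;
using System.Collections.Generic;

static class BatchPolicy
{
    public static void ProcessAll<T>(IEnumerable<T> items,
                                     Action<T> processOne,
                                     Action<T> failOne)
    {
        int succeeded = 0;
        string lastErrorMessage = "";

        foreach (T item in items)
        {
            try
            {
                processOne(item);
                succeeded++;
            }
            catch (Exception e)
            {
                lastErrorMessage = e.Message;
                failOne(item); // mark for retry, like SopInstanceProcessor.FailUid(uid, true)
            }
        }

        // One bad image shouldn't fail the batch; zero good images should.
        if (succeeded == 0)
            throw new ApplicationException(lastErrorMessage);
    }
}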
protected override void ProcessItem(Model.WorkQueue item)
{
    Platform.CheckForNullReference(item, "item");
    Platform.CheckForNullReference(item.StudyStorageKey, "item.StudyStorageKey");

    var context = new StudyProcessorContext(StorageLocation);

    // TODO: Should we enforce the patient's name rule?
    // If we do, the Study record will have the new patient's name
    // but how should we handle the name in the Patient record?
    bool enforceNameRules = false;
    var processor = new SopInstanceProcessor(context) { EnforceNameRules = enforceNameRules };

    var seriesMap = new Dictionary<string, List<string>>();

    bool successful = true;
    string failureDescription = null;

    // The processor stores its state in the Data column
    ReadQueueData(item);

    if (_queueData.State == null || !_queueData.State.ExecuteAtLeastOnce)
    {
        // Added for ticket #9673:
        // If the study folder does not exist and the study has been archived, trigger a restore and we're done
        if (!Directory.Exists(StorageLocation.GetStudyPath()))
        {
            if (StorageLocation.ArchiveLocations.Count > 0)
            {
                Platform.Log(LogLevel.Info,
                             "Reprocessing archived study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} without study data on the filesystem. Inserting Restore Request.",
                             Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber, ServerPartition.Description);

                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                // Post process had to be done first so the study is unlocked so the RestoreRequest can be inserted.
                ServerHelper.InsertRestoreRequest(StorageLocation);

                RaiseAlert(WorkQueueItem, AlertLevel.Warning,
                           string.Format("Found study {0} for Patient {1} (A#:{2}) on Partition {3} without storage folder, restoring study.",
                                         Study.StudyInstanceUid, Study.PatientsName, Study.AccessionNumber, ServerPartition.Description));
                return;
            }
        }

        if (Study == null)
        {
            Platform.Log(LogLevel.Info, "Reprocessing study {0} on Partition {1}",
                         StorageLocation.StudyInstanceUid, ServerPartition.Description);
        }
        else
        {
            Platform.Log(LogLevel.Info, "Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber, ServerPartition.Description);
        }

        CleanupDatabase();
    }
    else
    {
        if (_queueData.State.Completed)
        {
            #region SAFE-GUARD CODE: PREVENT INFINITE LOOP
            // The processor indicated it had completed reprocessing in a previous run. The entry should have been removed and this block of code should never be called.
            // However, we have seen ReprocessStudy entries that mysteriously contain rows in the WorkQueueUid table.
            // The rows prevent the entry from being removed from the database and the ReprocessStudy keeps repeating itself.

            // update the state first, increment the CompleteAttemptCount
            _queueData.State.ExecuteAtLeastOnce = true;
            _queueData.State.Completed = true;
            _queueData.State.CompleteAttemptCount++;
            SaveState(item, _queueData);

            if (_queueData.State.CompleteAttemptCount < 10)
            {
                // maybe there was a db error in the previous attempt to remove the entry. Let's try again.
                Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} but it was already completed!!!", StorageLocation.StudyInstanceUid);
                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
            }
            else
            {
                // we are definitely stuck.
                Platform.Log(LogLevel.Error, "ReprocessStudy {0} for study {1} appears stuck. Aborting it.", item.Key, StorageLocation.StudyInstanceUid);
                item.FailureDescription = "This entry had completed but could not be removed.";
                PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
            }

            return;
            #endregion
        }

        if (Study == null)
        {
            Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} on Partition {1}",
                         StorageLocation.StudyInstanceUid, ServerPartition.Description);
        }
        else
        {
            Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber, ServerPartition.Description);
        }
    }

    StudyXml studyXml = LoadStudyXml();

    int reprocessedCounter = 0;
    var removedFiles = new List<FileInfo>();
    try
    {
        // Traverse the directories, process 500 files at a time
        FileProcessor.Process(StorageLocation.GetStudyPath(), "*.*",
            delegate(string path, out bool cancel)
            {
                #region Reprocess File
                var file = new FileInfo(path);

                // ignore all files except those ending ".dcm"
                // ignore "bad(0).dcm" files too
                if (Regex.IsMatch(file.Name.ToUpper(), "[0-9]+\\.DCM$"))
                {
                    try
                    {
                        var dicomFile = new DicomFile(path);
                        dicomFile.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);

                        string seriesUid = dicomFile.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty);
                        string instanceUid = dicomFile.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty);
                        if (studyXml.Contains(seriesUid, instanceUid))
                        {
                            if (!seriesMap.ContainsKey(seriesUid))
                            {
                                seriesMap.Add(seriesUid, new List<string>());
                            }

                            if (!seriesMap[seriesUid].Contains(instanceUid))
                            {
                                seriesMap[seriesUid].Add(instanceUid);
                            }
                            else
                            {
                                Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                            }
                        }
                        else
                        {
                            Platform.Log(ServerPlatform.InstanceLogLevel, "Reprocessing SOP {0} for study {1}", instanceUid, StorageLocation.StudyInstanceUid);
                            string groupId = ServerHelper.GetUidGroup(dicomFile, StorageLocation.ServerPartition, WorkQueueItem.InsertTime);
                            ProcessingResult result = processor.ProcessFile(groupId, dicomFile, studyXml, true, false, null, null);
                            switch (result.Status)
                            {
                                case ProcessingStatus.Success:
                                    reprocessedCounter++;
                                    if (!seriesMap.ContainsKey(seriesUid))
                                    {
                                        seriesMap.Add(seriesUid, new List<string>());
                                    }

                                    if (!seriesMap[seriesUid].Contains(instanceUid))
                                    {
                                        seriesMap[seriesUid].Add(instanceUid);
                                    }
                                    else
                                    {
                                        Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                                    }
                                    break;

                                case ProcessingStatus.Reconciled:
                                    Platform.Log(LogLevel.Warn, "SOP was unexpectedly reconciled on reprocess SOP {0} for study {1}. It will be removed from the folder.", instanceUid, StorageLocation.StudyInstanceUid);
                                    failureDescription = String.Format("SOP Was reconciled: {0}", instanceUid);

                                    // Added for #10620 (Previously we didn't do anything here)
                                    // Because we are reprocessing files in the study folder, when a file needs to be reconciled it is copied to the reconcile folder.
                                    // Therefore, we need to delete the one in the study folder. Otherwise, there will be a problem when the SIQ entry is reconciled.
                                    // InstanceAlreadyExistsException will also be thrown by the SopInstanceProcessor if this ReprocessStudy WQI
                                    // resumes and reprocesses the same file again.
                                    // Note: we are sure that the file has been copied to the Reconcile folder and there's no way back.
                                    // We must get rid of this file in the study folder.
                                    FileUtils.Delete(path);

                                    // Special handling: if the file is one which we're supposed to reprocess at the end (see ProcessAdditionalFiles), we must remove the file from the list
                                    if (_additionalFilesToProcess != null && _additionalFilesToProcess.Contains(path))
                                    {
                                        _additionalFilesToProcess.Remove(path);
                                    }

                                    break;
                            }
                        }
                    }
                    catch (DicomException ex)
                    {
                        // TODO: should we fail the reprocess instead? Deleting a DICOM file can lead to an incomplete study.
                        removedFiles.Add(file);
                        Platform.Log(LogLevel.Warn, "Skip reprocessing and delete {0}: Not readable.", path);
                        FileUtils.Delete(path);
                        failureDescription = ex.Message;
                    }
                }
                else if (!file.Extension.Equals(".xml") && !file.Extension.Equals(".gz"))
                {
                    // not a ".dcm" or header file, delete it
                    removedFiles.Add(file);
                    FileUtils.Delete(path);
                }
                #endregion

                cancel = reprocessedCounter >= 500;
            }, true);

        if (studyXml != null)
        {
            EnsureConsistentObjectCount(studyXml, seriesMap);
            SaveStudyXml(studyXml);
        }

        // Completed if either all files have been reprocessed
        // or no more dicom files left that can be reprocessed.
        _completed = reprocessedCounter == 0;
    }
    catch (Exception e)
    {
        successful = false;
        failureDescription = e.Message;
        Platform.Log(LogLevel.Error, e, "Unexpected exception when reprocessing study: {0}", StorageLocation.StudyInstanceUid);
        Platform.Log(LogLevel.Error, "Study may be in invalid unprocessed state. Study location: {0}", StorageLocation.GetStudyPath());
        throw;
    }
    finally
    {
        LogRemovedFiles(removedFiles);

        // Update the state
        _queueData.State.ExecuteAtLeastOnce = true;
        _queueData.State.Completed = _completed;
        _queueData.State.CompleteAttemptCount++;
        SaveState(item, _queueData);

        if (!successful)
        {
            FailQueueItem(item, failureDescription);
        }
        else
        {
            if (!_completed)
            {
                // Put it back to Pending
                PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
            }
            else
            {
                LogHistory();

                // Run Study / Series Rules Engine.
                var engine = new StudyRulesEngine(StorageLocation, ServerPartition);
                engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);

                // Log the FilesystemQueue related entries
                StorageLocation.LogFilesystemQueue();

                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                Platform.Log(LogLevel.Info, "Completed reprocessing of study {0} on partition {1}",
                             StorageLocation.StudyInstanceUid, ServerPartition.Description);
            }
        }
    }
}
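The safe-guard region in ProcessItem is essentially a small persisted state machine: a "completed" entry that keeps reappearing is retried a bounded number of times and then failed fatally rather than looping forever. A reduced, self-contained sketch of that guard follows; ReprocessState and the save/retry/abort hooks are illustrative assumptions, not the actual queue data schema.

// Sketch: bound the retries of a work item that claims to be complete
// but cannot be removed. Hypothetical types and callbacks.
using System;

class ReprocessState
{
    public bool ExecuteAtLeastOnce;
    public bool Completed;
    public int CompleteAttemptCount;
}

static class StuckEntryGuard
{
    public static void OnResumeCompletedEntry(ReprocessState state,
                                              Action save,
                                              Action retryRemoval,
                                              Action abortFatally)
    {
        // Persist the incremented attempt count *before* acting, so a crash
        // here still moves the entry closer to the abort threshold.
        state.ExecuteAtLeastOnce = true;
        state.Completed = true;
        state.CompleteAttemptCount++;
        save();

        if (state.CompleteAttemptCount < 10)
            retryRemoval();   // maybe the previous removal hit a transient db error
        else
            abortFatally();   // definitely stuck; fail the entry
    }
}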