/// <summary>
/// Discards each SOP Instance in the work queue UID list by deleting its file on disk
/// and removing the corresponding <see cref="WorkQueueUid"/> record in one transaction.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
/// <remarks>
/// A failure on one UID is logged and the UID is failed via
/// <see cref="SopInstanceProcessor.FailUid"/>; processing continues with the remaining UIDs.
/// </remarks>
protected override void OnExecute(CommandProcessor theProcessor)
{
    Platform.CheckForNullReference(Context, "Context");
    Platform.CheckForNullReference(Context.ReconcileWorkQueueData, "Context.ReconcileWorkQueueData");

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        string imagePath = GetReconcileUidPath(uid);
        try
        {
            using (var processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid)))
            {
                var deleteFile = new FileDeleteCommand(imagePath, true);
                var deleteUid = new DeleteWorkQueueUidCommand(uid);
                processor.AddCommand(deleteFile);
                processor.AddCommand(deleteUid);
                Platform.Log(ServerPlatform.InstanceLogLevel, deleteFile.ToString());
                if (!processor.Execute())
                {
                    // Throw a specific exception type instead of the reserved base Exception
                    // (CA2201); it is caught below so the remaining UIDs are still processed.
                    throw new ApplicationException(String.Format("Unable to discard image {0}", uid.SopInstanceUid));
                }
            }
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Unexpected exception discarding file: {0}", imagePath);
            SopInstanceProcessor.FailUid(uid, true);
        }
    }
}
/// <summary>
/// Reconciles each SOP Instance in the work queue UID list into the destination study,
/// applying the configured update commands and the SopProcessed rules engine.
/// </summary>
/// <exception cref="ApplicationException">
/// Thrown with the last recorded error message if no image could be processed successfully.
/// </exception>
private void ProcessUidList()
{
    string lastErrorMessage = "";

    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine, omitting compression rules (handled elsewhere).
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Add the update commands to apply to each file.
    context.UpdateCommands.AddRange(BuildUpdateCommandList());

    // Add command to update the Series & Sop Instances.
    context.UpdateCommands.Add(new SeriesSopUpdateCommand(Context.WorkQueueItemStudyStorage, _destinationStudyStorage, UidMapper));

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    PrintUpdateCommands(context.UpdateCommands);

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        // Create the file reference outside the try/catch block so it can be
        // referenced in the catch block (for duplicate SIQ entry creation).
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);
        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, Context.Partition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };
            // Reuse the cached imagePath rather than recomputing GetReconcileUidPath(uid).
            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, imagePath, SopInstanceProcessorSopType.NewSop);
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            _processedCount++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, _processedCount, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            if (e is InstanceAlreadyExistsException || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                // TODO (Rigel) - Check if we should include the WorkItemData to insert into the WorkQueue here.
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, imagePath, Context.WorkQueueItem, uid, null);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
            _failedCount++;
        }
    }

    if (_processedCount == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
/// <summary>
/// Processes every SOP Instance in the work queue UID list into the destination
/// study folder, applying the configured update commands and SopProcessed rules.
/// </summary>
/// <exception cref="ApplicationException">
/// Thrown with the last failure message when no image was processed successfully.
/// </exception>
private void ProcessUidList()
{
    int counter = 0;
    Platform.Log(LogLevel.Info, "Populating images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    var context = new StudyProcessorContext(_destinationStudyStorage)
    {
        SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey)
    };

    // Prepare the rules engine; compression rules are intentionally omitted here.
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // File-level update commands (the new Study Instance Uid is already part of
    // this update), followed by the Series & Sop Instance update command.
    context.UpdateCommands.AddRange(Commands);
    context.UpdateCommands.Add(new SeriesSopUpdateCommand(Context.WorkQueueItemStudyStorage, _destinationStudyStorage, UidMapper));

    // Create/Load the Study XML File
    StudyXml studyXml = LoadStudyXml(_destinationStudyStorage);

    string lastError = "";
    foreach (WorkQueueUid queueUid in Context.WorkQueueUidList)
    {
        string sourcePath = GetReconcileUidPath(queueUid);
        var dicomFile = new DicomFile(sourcePath);
        var processor = new SopInstanceProcessor(context) { EnforceNameRules = true };
        try
        {
            dicomFile.Load();

            string uidGroup = ServerHelper.GetUidGroup(dicomFile, _destinationStudyStorage.ServerPartition, Context.WorkQueueItem.InsertTime);

            ProcessingResult outcome = processor.ProcessFile(uidGroup, dicomFile, studyXml, false, true, queueUid, sourcePath);
            if (outcome.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", dicomFile.Filename));
            }

            counter++;
            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled and Processed SOP {0} [{1} of {2}]", queueUid.SopInstanceUid, counter, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", queueUid.SopInstanceUid);
            lastError = e.Message;
            SopInstanceProcessor.FailUid(queueUid, true);
        }
    }

    // Surface the last failure to the caller when nothing succeeded.
    if (counter == 0)
    {
        throw new ApplicationException(lastError);
    }
}
/// <summary>
/// Reconciles each SOP Instance in the work queue UID list into the destination study,
/// applying the SopProcessed rules engine; duplicates are routed to the SIQ.
/// </summary>
/// <exception cref="ApplicationException">
/// Thrown with the last recorded error message if no image could be processed successfully.
/// </exception>
private void ProcessUidList()
{
    int counter = 0;
    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine, omitting compression rules (handled elsewhere).
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    string lastErrorMessage = "";

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        // Declared outside the try/catch so the catch block can reference the file
        // when creating a duplicate SIQ entry.
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);
        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, _destinationStudyStorage.ServerPartition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context);
            // Reuse the cached imagePath rather than recomputing GetReconcileUidPath(uid).
            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, imagePath, SopInstanceProcessorSopType.NewSop);
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            counter++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, counter, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            if (e is InstanceAlreadyExistsException || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                // TODO (Rigel) - Check if we should include the WorkQueueData field here
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, imagePath, Context.WorkQueueItem, uid, null);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
        }
    }

    if (counter == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}