public void InsertFailedWorkItemUid(WorkItem workItem, DicomMessageBase message, DicomProcessingResult result, int tryCount = 1)
{
    if (tryCount < 0)
        tryCount = 1;

    int tries = 0;
    while (tries++ < tryCount)
    {
        using (var commandProcessor = new ViewerCommandProcessor(String.Format("Processing Sop Instance {0}", result.SopInstanceUid)))
        {
            var fileName = Guid.NewGuid().ToString() + ".dcm";
            var insertWorkItemCommand = CreateWorkItemCommand(workItem, result, message, true, fileName);

            // Fail the Uid immediately, since we know the file isn't there.
            insertWorkItemCommand.WorkItemUid.Failed = true;

            commandProcessor.AddCommand(insertWorkItemCommand);

            if (commandProcessor.Execute())
            {
                IncrementTotalFiles(insertWorkItemCommand, result.StudyInstanceUid, result.ErrorMessage);
                return;
            }
        }

        // Brief pause before retrying the insert.
        Thread.Sleep(10);
    }

    Platform.Log(LogLevel.Error, "Failed to insert failed work item UID after {0} attempts (Sop Instance UID={1}).",
                 tryCount, result.SopInstanceUid);
}
public bool Process()
{
    using (var processor = new ViewerCommandProcessor("Deleting series from study: " + _location.Study.StudyInstanceUid))
    {
        try
        {
            DicomAttributeCollection instance = null;

            foreach (string seriesInstanceUid in SeriesInstanceUids)
            {
                foreach (SeriesXml seriesXml in StudyXml)
                {
                    if (seriesXml.SeriesInstanceUid.Equals(seriesInstanceUid))
                    {
                        // Queue a delete for every SOP instance in the series being removed.
                        foreach (InstanceXml instanceXml in seriesXml)
                        {
                            processor.AddCommand(new FileDeleteCommand(
                                _location.GetSopInstancePath(seriesInstanceUid, instanceXml.SopInstanceUid), true));
                        }
                    }
                    else
                    {
                        // Save an instance we're keeping so we can update the study
                        if (instance == null)
                            instance = CollectionUtils.FirstElement(seriesXml).Collection;
                    }
                }

                processor.AddCommand(new RemoveSeriesFromStudyXml(StudyXml, seriesInstanceUid));
            }

            processor.AddCommand(new SaveStudyXmlCommand(StudyXml, _location));
            processor.AddCommand(new InsertOrUpdateStudyCommand(_location, StudyXml, InsertOrUpdateStudyCommand.UpdateReason.SopsDeleted));

            // Do the actual processing
            if (!processor.Execute())
            {
                Platform.Log(LogLevel.Error, "Failure deleting {0} series for Study: {1}",
                             SeriesInstanceUids.Count, _location.Study.StudyInstanceUid);
                throw new ApplicationException(
                    "Unexpected failure (" + processor.FailureReason + ") executing command for Study: " + _location.Study.StudyInstanceUid,
                    processor.FailureException);
            }

            Platform.Log(LogLevel.Info, "Deleted {0} Series for Study {1}",
                         SeriesInstanceUids.Count, _location.Study.StudyInstanceUid);
            return true;
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", processor.Description);
            processor.Rollback();
            throw new ApplicationException("Unexpected exception when deleting series.", e);
        }
    }
}
private void Process(DicomMessageBase message, FileImportBehaviourEnum fileImportBehaviour, WorkItem workItem, DicomProcessingResult result)
{
    result.Initialize();

    // Use the command processor for rollback capabilities.
    using (var commandProcessor = new ViewerCommandProcessor(String.Format("Processing Sop Instance {0}", result.SopInstanceUid)))
    {
        try
        {
            var studyLocation = new StudyLocation(result.StudyInstanceUid);
            String destinationFile = studyLocation.GetSopInstancePath(result.SeriesInstanceUid, result.SopInstanceUid);

            DicomFile file = ConvertToDicomFile(message, destinationFile, _context.SourceAE);

            // Create the Study Folder, if need be
            commandProcessor.AddCommand(new CreateDirectoryCommand(studyLocation.StudyFolder));

            bool duplicateFile = false;
            string dupName = null;
            if (File.Exists(destinationFile))
            {
                // TODO (CR Jun 2012): Shouldn't the commands themselves make this decision at the time
                // the file is being saved? Otherwise, what happens if the same SOP were being saved 2x simultaneously.
                // I know the odds are low, but just pointing it out.
                duplicateFile = true;
                dupName = Guid.NewGuid().ToString() + ".dcm";
                destinationFile = Path.Combine(Path.GetDirectoryName(destinationFile), dupName);
            }

            if (fileImportBehaviour == FileImportBehaviourEnum.Move)
            {
                commandProcessor.AddCommand(new RenameFileCommand(file.Filename, destinationFile, true));
            }
            else if (fileImportBehaviour == FileImportBehaviourEnum.Copy)
            {
                commandProcessor.AddCommand(new CopyFileCommand(file.Filename, destinationFile, true));
            }
            else if (fileImportBehaviour == FileImportBehaviourEnum.Save)
            {
                commandProcessor.AddCommand(new SaveDicomFileCommand(destinationFile, file, true));
            }

            var insertWorkItemCommand = CreateWorkItemCommand(workItem, result, file, duplicateFile, dupName);
            commandProcessor.AddCommand(insertWorkItemCommand);

            if (commandProcessor.Execute())
            {
                result.DicomStatus = DicomStatuses.Success;
                IncrementTotalFiles(insertWorkItemCommand, result.StudyInstanceUid);
            }
            else
            {
                if (commandProcessor.FailureException is ChangeConflictException
                    || commandProcessor.FailureException is SqlCeLockTimeoutException)
                    result.RetrySuggested = true; // Change conflict or lock timeout may work if we just retry

                Platform.Log(LogLevel.Warn, "Failure Importing file: {0}", file.Filename);
                string failureMessage = String.Format("Failure processing message: {0}. Sending failure status.",
                                                      commandProcessor.FailureReason);
                result.SetError(DicomStatuses.ProcessingFailure, failureMessage);

                // processor already rolled back
            }
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", commandProcessor.Description);
            commandProcessor.Rollback();
            result.SetError(result.DicomStatus ?? DicomStatuses.ProcessingFailure, e.Message);
        }
    }
}
private void InsertBatch(IList<ProcessorFile> list, StudyXml studyXml)
{
    using (var processor = new ViewerCommandProcessor("Processing WorkItem DICOM file(s)"))
    {
        try
        {
            // Create an AggregateCommand where we batch together all the database updates
            // and execute them together as the last command.
            var batchDatabaseCommand = new AggregateCommand();

            foreach (var file in list)
            {
                if (!string.IsNullOrEmpty(file.FilePath) && file.File == null)
                {
                    try
                    {
                        file.File = new DicomFile(file.FilePath);

                        // WARNING: If we ever do anything where we update files and save them,
                        // we may have to change this.
                        file.File.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);
                    }
                    catch (FileNotFoundException)
                    {
                        Platform.Log(LogLevel.Warn, "File to be processed is not found, ignoring: {0}", file.FilePath);

                        if (file.ItemUid != null)
                            batchDatabaseCommand.AddSubCommand(new CompleteWorkItemUidCommand(file.ItemUid));

                        continue;
                    }
                }
                else
                {
                    file.FilePath = file.File.Filename;
                }

                String seriesUid = file.File.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
                String sopUid = file.File.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);
                String finalDest = StudyLocation.GetSopInstancePath(seriesUid, sopUid);

                if (file.FilePath != finalDest)
                {
                    processor.AddCommand(new RenameFileCommand(file.FilePath, finalDest, false));
                }

                // Update the StudyStream object
                var insertStudyXmlCommand = new InsertStudyXmlCommand(file.File, studyXml, StudyLocation, false);
                processor.AddCommand(insertStudyXmlCommand);

                if (file.ItemUid != null)
                    batchDatabaseCommand.AddSubCommand(new CompleteWorkItemUidCommand(file.ItemUid));
            }

            // Now save the batched updates to the StudyXml file.
            processor.AddCommand(new SaveStudyXmlCommand(studyXml, StudyLocation));

            // Update the Study table, based on the studyXml
            var updateReason = IsReprocess
                                   ? InsertOrUpdateStudyCommand.UpdateReason.Reprocessing
                                   : InsertOrUpdateStudyCommand.UpdateReason.LiveImport;

            batchDatabaseCommand.AddSubCommand(new InsertOrUpdateStudyCommand(StudyLocation, studyXml, updateReason));

            // Now, add all the batched database updates
            processor.AddCommand(batchDatabaseCommand);

            // Do the actual processing
            if (!processor.Execute())
            {
                Platform.Log(LogLevel.Error, "Failure processing {0} for Study: {1}",
                             processor.Description, StudyLocation.Study.StudyInstanceUid);
                throw new ApplicationException(
                    "Unexpected failure (" + processor.FailureReason + ") executing command for Study: " + StudyLocation.Study.StudyInstanceUid,
                    processor.FailureException);
            }

            StudyLocation.Study = processor.ViewerContext.ContextStudy;

            Platform.Log(LogLevel.Info, "Processed {0} SOPs for Study {1}", list.Count, StudyLocation.Study.StudyInstanceUid);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", processor.Description);
            processor.Rollback();
            throw new ApplicationException("Unexpected exception when processing file.", e);
        }
    }
}