/// <summary>
/// Overwrites the existing copy of a SOP with the received duplicate. Updates the database,
/// the study XML stream, and applies any SOP-processed rules.
/// </summary>
/// <param name="dupFile">The duplicate DICOM file that will replace the existing copy on disk.</param>
/// <param name="uid">The work queue uid entry identifying the duplicate SOP; deleted on success.</param>
/// <param name="studyXml">The study XML stream to update with the new file.</param>
/// <returns>
/// A <see cref="ProcessDuplicateResult"/> with <see cref="DuplicateProcessResultAction.Accept"/>
/// as the action taken.
/// </returns>
/// <exception cref="Exception">Thrown when the command processor fails, to force the work queue item to fail.</exception>
private ProcessDuplicateResult OverwriteAndUpdateDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
{
    Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

    var result = new ProcessDuplicateResult();
    result.ActionTaken = DuplicateProcessResultAction.Accept;

    using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
    {
        var sopContext = new ServerActionContext(dupFile, Context.StorageLocation.FilesystemKey, Context.StorageLocation.ServerPartition, Context.StorageLocation.Key, processor);

        // Replace the existing copy with the duplicate file.
        var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
        processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

        // Do so that the FileSize calculation in InsertStudyXmlCommand works
        dupFile.Filename = destination;

        // Update the StudyStream object
        var insertStudyXmlCommand = new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation);
        processor.AddCommand(insertStudyXmlCommand);

        // Ideally we don't need to insert the instance into the database since it's a duplicate.
        // However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
        // For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
        // But it will be corrected at the end of the processing when the study verification is executed.
        processor.AddCommand(new UpdateInstanceCommand(Context.StorageLocation.ServerPartition, Context.StorageLocation, dupFile));

        processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

        processor.AddCommand(new ApplySopRulesCommand(sopContext, Context.SopProcessedRulesEngine));

        if (!processor.Execute())
        {
            // Notify listeners of the failed update before failing the item.
            EventManager.FireEvent(this, new FailedUpdateSopEventArgs
            {
                File = dupFile,
                ServerPartitionEntry = Context.StorageLocation.ServerPartition,
                WorkQueueUidEntry = uid,
                WorkQueueEntry = WorkQueueItem,
                FileLength = (ulong)insertStudyXmlCommand.FileSize,
                FailureMessage = processor.FailureReason
            });

            // cause the item to fail
            // FIX: the original wrapped this constant message in string.Format() with no
            // format arguments; the message is passed directly instead.
            throw new Exception("Error occurred when trying to overwrite duplicate in the filesystem.", processor.FailureException);
        }

        EventManager.FireEvent(this, new UpdateSopEventArgs
        {
            File = dupFile,
            ServerPartitionEntry = Context.StorageLocation.ServerPartition,
            WorkQueueUidEntry = uid,
            WorkQueueEntry = WorkQueueItem,
            FileLength = (ulong)insertStudyXmlCommand.FileSize
        });
    }

    return result;
}
/// <summary>
/// Inserts a single DICOM file into the study via a command processor: runs any pending
/// image-level update commands, applies compression rules, saves the file into the study
/// folder, updates the study XML stream and the database, and applies SOP-processed rules.
/// On failure the processor is rolled back and the exception is rethrown.
/// </summary>
/// <param name="file">The DICOM file to insert.</param>
/// <param name="stream">The study XML stream to update with the instance.</param>
/// <param name="uid">The work queue uid entry to delete on success; may be null.</param>
/// <param name="deleteFile">If non-null, a file path to delete as part of the processing (backed up for rollback).</param>
/// <exception cref="ApplicationException">Thrown when command execution fails or an unexpected exception occurs.</exception>
private void InsertInstance(DicomFile file, StudyXml stream, WorkQueueUid uid, string deleteFile)
{
    using (ServerCommandProcessor processor = new ServerCommandProcessor("Processing WorkQueue DICOM file"))
    {
        // Give listeners a chance to hook into the processor before commands are added.
        EventsHelper.Fire(OnInsertingSop, this, new SopInsertingEventArgs { Processor = processor });
        InsertInstanceCommand insertInstanceCommand = null;
        InsertStudyXmlCommand insertStudyXmlCommand = null;

        String patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, String.Empty);
        _modality = file.DataSet[DicomTags.Modality].GetString(0, String.Empty);

        // Queue any pending image-level update commands (e.g. study edits) against this file.
        if (_context.UpdateCommands.Count > 0)
        {
            foreach (BaseImageLevelUpdateCommand command in _context.UpdateCommands)
            {
                command.File = file;
                processor.AddCommand(command);
            }
        }
        try
        {
            // Create a context for applying actions from the rules engine
            ServerActionContext context = new ServerActionContext(file, _context.StorageLocation.FilesystemKey, _context.Partition, _context.StorageLocation.Key);
            context.CommandProcessor = processor;

            // Compression rules may add commands to the processor (raising CommandCount below).
            _context.SopCompressionRulesEngine.Execute(context);

            String seriesUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
            String sopUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);
            String finalDest = _context.StorageLocation.GetSopInstancePath(seriesUid, sopUid);

            if (_context.UpdateCommands.Count > 0)
            {
                // Update commands were queued above, so the file must always be (re)saved.
                processor.AddCommand(new SaveDicomFileCommand(_context.StorageLocation, file, file.Filename != finalDest));
            }
            else if (file.Filename != finalDest || processor.CommandCount > 0)
            {
                // Have to be careful here about failure on exists vs. not failing on exists
                // because of the different use cases of the importer.
                // save the file in the study folder, or if its been compressed
                processor.AddCommand(new SaveDicomFileCommand(finalDest, file, file.Filename != finalDest));
            }

            // Update the StudyStream object
            insertStudyXmlCommand = new InsertStudyXmlCommand(file, stream, _context.StorageLocation);
            processor.AddCommand(insertStudyXmlCommand);

            // Have the rules applied during the command processor, and add the objects.
            processor.AddCommand(new ApplySopRulesCommand(context, _context.SopProcessedRulesEngine));

            // If specified, delete the file
            if (deleteFile != null)
            {
                processor.AddCommand(new FileDeleteCommand(deleteFile, true));
            }

            // Insert into the database, but only if its not a duplicate so the counts don't get off
            insertInstanceCommand = new InsertInstanceCommand(file, _context.StorageLocation);
            processor.AddCommand(insertInstanceCommand);

            // Do a check if the StudyStatus value should be changed in the StorageLocation. This
            // should only occur if the object has been compressed in the previous steps.
            processor.AddCommand(new UpdateStudyStatusCommand(_context.StorageLocation, file));

            if (uid != null)
            {
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
            }

            // Do the actual processing
            if (!processor.Execute())
            {
                Platform.Log(LogLevel.Error, "Failure processing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid);
                Platform.Log(LogLevel.Error, "File that failed processing: {0}", file.Filename);
                throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException);
            }
            Platform.Log(ServerPlatform.InstanceLogLevel, "Processed SOP: {0} for Patient {1}", file.MediaStorageSopInstanceUid, patientsName);
        }
        catch (Exception e)
        {
            // Roll back all executed commands (filesystem + DB) before rethrowing.
            Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", processor.Description);
            processor.Rollback();
            throw new ApplicationException("Unexpected exception when processing file.", e);
        }
        finally
        {
            // Harvest timing statistics regardless of success or failure.
            if (insertInstanceCommand != null && insertInstanceCommand.Statistics.IsSet)
            {
                _instanceStats.InsertDBTime.Add(insertInstanceCommand.Statistics);
            }
            if (insertStudyXmlCommand != null && insertStudyXmlCommand.Statistics.IsSet)
            {
                _instanceStats.InsertStreamTime.Add(insertStudyXmlCommand.Statistics);
            }
        }
    }
}
/// <summary>
/// Processes a batch of DICOM files for a study: each file is moved into the study folder
/// and recorded in the study XML stream, while all database updates are accumulated into a
/// single aggregate command that executes last. On failure the processor is rolled back and
/// the exception is rethrown.
/// </summary>
/// <param name="list">The files to process in this batch.</param>
/// <param name="studyXml">The study XML stream to update and persist.</param>
/// <exception cref="ApplicationException">Thrown when command execution fails or an unexpected exception occurs.</exception>
private void InsertBatch(IList<ProcessorFile> list, StudyXml studyXml)
{
    using (var commandProcessor = new ViewerCommandProcessor("Processing WorkItem DICOM file(s)"))
    {
        try
        {
            // Create an AggregrateCommand where we batch together all the database updates
            // and execute them together as the last command.
            var pendingDbUpdates = new AggregateCommand();

            foreach (var processorFile in list)
            {
                bool needsLoad = !string.IsNullOrEmpty(processorFile.FilePath) && processorFile.File == null;
                if (needsLoad)
                {
                    try
                    {
                        processorFile.File = new DicomFile(processorFile.FilePath);

                        // WARNING: If we ever do anything where we update files and save them,
                        // we may have to change this.
                        processorFile.File.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);
                    }
                    catch (FileNotFoundException)
                    {
                        // A vanished file is skipped, but its WorkItem uid is still completed.
                        Platform.Log(LogLevel.Warn, "File to be processed is not found, ignoring: {0}", processorFile.FilePath);

                        if (processorFile.ItemUid != null)
                            pendingDbUpdates.AddSubCommand(new CompleteWorkItemUidCommand(processorFile.ItemUid));

                        continue;
                    }
                }
                else
                {
                    processorFile.FilePath = processorFile.File.Filename;
                }

                var seriesInstanceUid = processorFile.File.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
                var sopInstanceUid = processorFile.File.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);
                var destinationPath = StudyLocation.GetSopInstancePath(seriesInstanceUid, sopInstanceUid);

                // Move the file into the study folder if it isn't already there.
                if (processorFile.FilePath != destinationPath)
                    commandProcessor.AddCommand(new RenameFileCommand(processorFile.FilePath, destinationPath, false));

                // Record the instance in the StudyXml stream (persisted once for the whole batch below).
                commandProcessor.AddCommand(new InsertStudyXmlCommand(processorFile.File, studyXml, StudyLocation, false));

                if (processorFile.ItemUid != null)
                    pendingDbUpdates.AddSubCommand(new CompleteWorkItemUidCommand(processorFile.ItemUid));
            }

            // Now save the batched updates to the StudyXml file.
            commandProcessor.AddCommand(new SaveStudyXmlCommand(studyXml, StudyLocation));

            // Update the Study table, based on the studyXml.
            InsertOrUpdateStudyCommand.UpdateReason updateReason;
            if (IsReprocess)
                updateReason = InsertOrUpdateStudyCommand.UpdateReason.Reprocessing;
            else
                updateReason = InsertOrUpdateStudyCommand.UpdateReason.LiveImport;
            pendingDbUpdates.AddSubCommand(new InsertOrUpdateStudyCommand(StudyLocation, studyXml, updateReason));

            // Now, add all the batched database updates as the final command.
            commandProcessor.AddCommand(pendingDbUpdates);

            // Do the actual processing.
            if (!commandProcessor.Execute())
            {
                Platform.Log(LogLevel.Error, "Failure processing {0} for Study: {1}", commandProcessor.Description, StudyLocation.Study.StudyInstanceUid);
                throw new ApplicationException("Unexpected failure (" + commandProcessor.FailureReason + ") executing command for Study: " + StudyLocation.Study.StudyInstanceUid, commandProcessor.FailureException);
            }

            StudyLocation.Study = commandProcessor.ViewerContext.ContextStudy;

            Platform.Log(LogLevel.Info, "Processed {0} SOPs for Study {1}", list.Count, StudyLocation.Study.StudyInstanceUid);
        }
        catch (Exception e)
        {
            // Roll back all executed commands before rethrowing.
            Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", commandProcessor.Description);
            commandProcessor.Rollback();
            throw new ApplicationException("Unexpected exception when processing file.", e);
        }
    }
}