protected override void OnExecute(CommandProcessor theProcessor) { Platform.CheckForNullReference(Context, "Context"); Platform.CheckForNullReference(Context.ReconcileWorkQueueData, "Context.ReconcileWorkQueueData"); foreach (WorkQueueUid uid in Context.WorkQueueUidList) { string imagePath = GetReconcileUidPath(uid); try { using (var processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid))) { var deleteFile = new FileDeleteCommand(imagePath, true); var deleteUid = new DeleteWorkQueueUidCommand(uid); processor.AddCommand(deleteFile); processor.AddCommand(deleteUid); Platform.Log(ServerPlatform.InstanceLogLevel, deleteFile.ToString()); if (!processor.Execute()) { throw new Exception(String.Format("Unable to discard image {0}", uid.SopInstanceUid)); } } } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected exception discarding file: {0}", imagePath); SopInstanceProcessor.FailUid(uid, true); } } }
/// <summary> /// Do the actual rebuild. On error, will attempt to reprocess the study. /// </summary> public void RebuildXml() { string rootStudyPath = _location.GetStudyPath(); try { using (ServerCommandProcessor processor = new ServerCommandProcessor("Rebuild XML")) { var command = new RebuildStudyXmlCommand(_location.StudyInstanceUid, rootStudyPath); processor.AddCommand(command); var updateCommand = new UpdateStudySizeInDBCommand(_location, command); processor.AddCommand(updateCommand); if (!processor.Execute()) { throw new ApplicationException(processor.FailureReason, processor.FailureException); } Study theStudy = _location.Study; if (theStudy.NumberOfStudyRelatedInstances != command.StudyXml.NumberOfStudyRelatedInstances) { // We rebuilt, but the counts don't match. throw new StudyIntegrityValidationFailure(ValidationErrors.InconsistentObjectCount, new ValidationStudyInfo(theStudy, _location.ServerPartition), string.Format( "Database study count {0} does not match study xml {1}", theStudy.NumberOfStudyRelatedInstances, command.StudyXml.NumberOfStudyRelatedInstances)); } Platform.Log(LogLevel.Info, "Completed rebuilding Study XML file for study {0}", _location.StudyInstanceUid); } } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected error when rebuilding study XML for directory: {0}", _location.FilesystemPath); StudyReprocessor reprocessor = new StudyReprocessor(); try { WorkQueue reprocessEntry = reprocessor.ReprocessStudy("Rebuild StudyXml", _location, Platform.Time); if (reprocessEntry == null) { Platform.Log(LogLevel.Error, "Failure attempting to reprocess study: {0}", _location.StudyInstanceUid); } else Platform.Log(LogLevel.Info, "Inserted reprocess request for study: {0}", _location.StudyInstanceUid); } catch(InvalidStudyStateOperationException ex) { Platform.Log(LogLevel.Error, "Failure attempting to reprocess study {0}: {1}", _location.StudyInstanceUid, ex.Message); } } }
/// <summary> /// Apply the Rules engine. /// </summary> /// <remarks> /// <para> /// This method applies the rules engine to the first image in each series within a study. /// The assumption is that the actions generated by the engine can handle being applied more /// than once for the same study. This is also done to handle the case of multi-modality /// studies where you may want the rules to be run against each series, because they may /// apply differently. /// </para> /// <para> /// Note that we are still applying series level moves, although there currently are not /// any series level rules. We've somewhat turned the study level rules into series /// level rules. /// </para> /// </remarks> public void Apply(ServerRuleApplyTimeEnum applyTime) { using(var theProcessor = new ServerCommandProcessor("Study Rule Processor")) { Apply(applyTime, theProcessor); if (!theProcessor.Execute()) { Platform.Log(LogLevel.Error, "Unexpected failure processing Study level rules for study {0} on partition {1} at apply time {2}", _location.StudyInstanceUid, _partition.Description, applyTime.Description); } } }
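// Usage sketch (illustrative, not part of the original source; the constructor mirrors
// the archive code later in this section). An engine is built for a storage location and
// partition, then Apply is called either standalone (it creates its own processor, as
// above) or with an existing ServerCommandProcessor so the rule actions join that
// processor's command list. The local variables and the StudyProcessed apply-time value
// are assumptions here.
var rulesEngine = new StudyRulesEngine(storageLocation, partition, studyXml);
rulesEngine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);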
private DicomProcessingResult HandleDuplicate(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomFile file) { Study study = studyLocation.Study ?? studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext); if (study != null) Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (A#:{1} StudyUid:{2} Patient: {3} ID:{4})", sopInstanceUid, study.AccessionNumber, study.StudyInstanceUid, study.PatientsName, study.PatientId); else Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.", sopInstanceUid, studyLocation.StudyInstanceUid); SopProcessingContext sopProcessingContext = new SopProcessingContext(commandProcessor, studyLocation, _context.ContextID); DicomProcessingResult result = DuplicateSopProcessorHelper.Process(sopProcessingContext, file); return result; }
private void ProcessUid(WorkQueueUid uid) { Platform.CheckForNullReference(uid, "uid"); string imagePath = GetUidPath(uid); using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid))) { // If the file for some reason doesn't exist, we just ignore it if (File.Exists(imagePath)) { Platform.Log(ServerPlatform.InstanceLogLevel, "Deleting {0}", imagePath); FileDeleteCommand deleteFile = new FileDeleteCommand(imagePath, true); processor.AddCommand(deleteFile); } else { Platform.Log(LogLevel.Warn, "WARNING {0} is missing.", imagePath); } DeleteWorkQueueUidCommand deleteUid = new DeleteWorkQueueUidCommand(uid); processor.AddCommand(deleteUid); if (!processor.Execute()) { throw new Exception(String.Format("Unable to delete image {0}", uid.SopInstanceUid)); } } }
private void InsertInstance(DicomFile file) { StudyStorageLocation location; string studyInstanceUid = file.DataSet[DicomTags.StudyInstanceUid].ToString(); string studyDate = file.DataSet[DicomTags.StudyDate].ToString(); using (IUpdateContext context = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush)) { bool created; location = FilesystemMonitor.Instance.GetOrCreateWritableStudyStorageLocation(studyInstanceUid, studyDate, TransferSyntax.ExplicitVrLittleEndian, context, _partition, out created); context.Commit(); } using (ServerCommandProcessor processor = new ServerCommandProcessor("Processing WorkQueue DICOM file")) { try { // Insert into the database, but only if it's not a duplicate so the counts don't get thrown off InsertInstanceCommand insertInstanceCommand = new InsertInstanceCommand(file, location); processor.AddCommand(insertInstanceCommand); // Do the actual processing if (!processor.Execute()) { Platform.Log(LogLevel.Error, "Failure processing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid); Platform.Log(LogLevel.Error, "File that failed processing: {0}", file.Filename); throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException); } } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", processor.Description); processor.Rollback(); throw new ApplicationException("Unexpected exception when processing file.", e); } } }
private static void UpdateNameBasedOnRules(DicomFile file) { string originalPatientsNameInFile = file.DataSet[DicomTags.PatientsName].ToString(); if (String.IsNullOrEmpty(originalPatientsNameInFile)) return; using (ServerCommandProcessor processor = new ServerCommandProcessor("Update Patient's Name")) { string normPatName = GetAcceptableName(originalPatientsNameInFile); if (!originalPatientsNameInFile.Equals(normPatName, StringComparison.InvariantCultureIgnoreCase)) { processor.AddCommand(new SetTagCommand(file, DicomTags.PatientsName, originalPatientsNameInFile, normPatName)); if (!processor.Execute()) { throw new ApplicationException(String.Format("AUTO-CORRECTION Failed: Unable to correct the patient's name in the image. Reason: {0}", processor.FailureReason), processor.FailureException); } } } }
/// <summary> /// Migrates the study to new tier /// </summary> /// <param name="storage"></param> /// <param name="newFilesystem"></param> private void DoMigrateStudy(StudyStorageLocation storage, ServerFilesystemInfo newFilesystem) { Platform.CheckForNullReference(storage, "storage"); Platform.CheckForNullReference(newFilesystem, "newFilesystem"); TierMigrationStatistics stat = new TierMigrationStatistics {StudyInstanceUid = storage.StudyInstanceUid}; stat.ProcessSpeed.Start(); StudyXml studyXml = storage.LoadStudyXml(); stat.StudySize = (ulong) studyXml.GetStudySize(); Platform.Log(LogLevel.Info, "About to migrate study {0} from {1} to {2}", storage.StudyInstanceUid, storage.FilesystemTierEnum, newFilesystem.Filesystem.Description); string newPath = Path.Combine(newFilesystem.Filesystem.FilesystemPath, storage.PartitionFolder); DateTime startTime = Platform.Time; DateTime lastLog = Platform.Time; int fileCounter = 0; ulong bytesCopied = 0; long instanceCountInXml = studyXml.NumberOfStudyRelatedInstances; using (ServerCommandProcessor processor = new ServerCommandProcessor("Migrate Study")) { TierMigrationContext context = new TierMigrationContext { OriginalStudyLocation = storage, Destination = newFilesystem }; string origFolder = context.OriginalStudyLocation.GetStudyPath(); processor.AddCommand(new CreateDirectoryCommand(newPath)); newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyFolder); processor.AddCommand(new CreateDirectoryCommand(newPath)); newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyInstanceUid); // don't create this directory so that it won't be backed up by MoveDirectoryCommand CopyDirectoryCommand copyDirCommand = new CopyDirectoryCommand(origFolder, newPath, delegate (string path) { // Update the progress. This is useful if the migration takes a long time to complete. FileInfo file = new FileInfo(path); bytesCopied += (ulong)file.Length; fileCounter++; if (file.Extension != null && file.Extension.Equals(ServerPlatform.DicomFileExtension, StringComparison.InvariantCultureIgnoreCase)) { TimeSpan elapsed = Platform.Time - lastLog; TimeSpan totalElapsed = Platform.Time - startTime; double speedInMBPerSecond = 0; if (totalElapsed.TotalSeconds > 0) { speedInMBPerSecond = (bytesCopied / 1024f / 1024f) / totalElapsed.TotalSeconds; } if (elapsed > TimeSpan.FromSeconds(WorkQueueSettings.Instance.TierMigrationProgressUpdateInSeconds)) { #region Log Progress StringBuilder stats = new StringBuilder(); if (instanceCountInXml != 0) { float pct = (float)fileCounter / instanceCountInXml; stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2} ({3:0}% completed). Speed={4:0.00}MB/s", fileCounter, bytesCopied / 1024f / 1024f, startTime, pct * 100, speedInMBPerSecond); } else { stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2}. Speed={3:0.00}MB/s", fileCounter, bytesCopied / 1024f / 1024f, startTime, speedInMBPerSecond); } Platform.Log(LogLevel.Info, "Tier migration for study {0}: {1}", storage.StudyInstanceUid, stats.ToString()); try { using (IUpdateContext ctx = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush)) { IWorkQueueEntityBroker broker = ctx.GetBroker<IWorkQueueEntityBroker>(); WorkQueueUpdateColumns parameters = new WorkQueueUpdateColumns {FailureDescription = stats.ToString()}; broker.Update(WorkQueueItem.GetKey(), parameters); ctx.Commit(); } } catch { // can't log the progress so far... just ignore it } finally { lastLog = Platform.Time; } #endregion } } }); processor.AddCommand(copyDirCommand); DeleteDirectoryCommand delDirCommand = new DeleteDirectoryCommand(origFolder, false) {RequiresRollback = false}; processor.AddCommand(delDirCommand); TierMigrateDatabaseUpdateCommand updateDbCommand = new TierMigrateDatabaseUpdateCommand(context); processor.AddCommand(updateDbCommand); Platform.Log(LogLevel.Info, "Start migrating study {0}... expecting {1} to be moved", storage.StudyInstanceUid, ByteCountFormatter.Format(stat.StudySize)); if (!processor.Execute()) { if (processor.FailureException != null) throw processor.FailureException; throw new ApplicationException(processor.FailureReason); } stat.DBUpdate = updateDbCommand.Statistics; stat.CopyFiles = copyDirCommand.CopySpeed; stat.DeleteDirTime = delDirCommand.Statistics; } stat.ProcessSpeed.SetData(bytesCopied); stat.ProcessSpeed.End(); Platform.Log(LogLevel.Info, "Successfully migrated study {0} from {1} to {2} in {3} [ {4} files, {5} @ {6}, DB Update={7}, Remove Dir={8}]", storage.StudyInstanceUid, storage.FilesystemTierEnum, newFilesystem.Filesystem.FilesystemTierEnum, TimeSpanFormatter.Format(stat.ProcessSpeed.ElapsedTime), fileCounter, ByteCountFormatter.Format(bytesCopied), stat.CopyFiles.FormattedValue, stat.DBUpdate.FormattedValue, stat.DeleteDirTime.FormattedValue); string originalPath = storage.GetStudyPath(); if (Directory.Exists(storage.GetStudyPath())) { Platform.Log(LogLevel.Info, "Original study folder could not be deleted. It must be cleaned up manually: {0}", originalPath); ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Warning, WorkQueueItem.WorkQueueTypeEnum.ToString(), 1000, GetWorkQueueContextData(WorkQueueItem), TimeSpan.Zero, "Study has been migrated to a new tier. Original study folder must be cleaned up manually: {0}", originalPath); } UpdateAverageStatistics(stat); }
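// Worked example of the progress math above (illustrative numbers): if 750 MB
// (786,432,000 bytes) have been copied 60 seconds after startTime, then
//   speedInMBPerSecond = (786432000 / 1024f / 1024f) / 60 = 12.5 MB/s
// and with 200 of 400 instances moved, the logged completion is
//   (float)200 / 400 * 100 = 50%.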
private static void UpdateImage(DicomFile file, IEnumerable<BaseImageLevelUpdateCommand> commands) { using (ServerCommandProcessor processor = new ServerCommandProcessor("Update Image According to History")) { foreach (BaseImageLevelUpdateCommand cmd in commands) { cmd.File = file; processor.AddCommand(cmd); } if (!processor.Execute()) { throw new ApplicationException(String.Format("AUTO-RECONCILE Failed: Unable to update image to match target study. Reason: {0}", processor.FailureReason), processor.FailureException); } } }
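// Usage sketch (illustrative; mirrors UpdateStudyOrDuplicates elsewhere in this
// section): the update commands are typically produced by an ImageUpdateCommandBuilder
// from the target study's storage location and then applied to each file being
// reconciled. The local variables are assumptions.
var commandBuilder = new ImageUpdateCommandBuilder();
var updateCommands = commandBuilder.BuildCommands<StudyMatchingMap>(targetStudyLocation);
UpdateImage(reconciledFile, updateCommands);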
/// <summary> /// Move the file specified in the path to the incoming folder so that it will be imported again /// </summary> /// <param name="path"></param> private void MoveFileToIncomingFolder(string path) { Platform.Log(LogLevel.Debug, "Moving file {0} to incoming folder", path); // a missing file should stop us here because it may mean the study is incomplete if (!File.Exists(path)) throw new FileNotFoundException(string.Format("File is missing: {0}", path)); // move the file to the Incoming folder to reprocess using (var processor = new ServerCommandProcessor("Move file back to incoming folder")) { var fileInfo = new FileInfo(path); const string folder = "FromWorkQueue"; var incomingPath = GetServerPartitionIncomingFolder(); incomingPath = Path.Combine(incomingPath, folder); incomingPath = Path.Combine(incomingPath, StorageLocation.StudyInstanceUid); var createDirCommand = new CreateDirectoryCommand(incomingPath); processor.AddCommand(createDirCommand); incomingPath = Path.Combine(incomingPath, fileInfo.Name); var move = new RenameFileCommand(path, incomingPath, true); processor.AddCommand(move); if (!processor.Execute()) { throw new Exception("Unexpected error when trying to move file back to the incoming folder for reprocessing", processor.FailureException); } Platform.Log(LogLevel.Info, "File {0} has been moved to the incoming folder.", path); } }
private void RemoveExistingImage(WorkQueueUid uid) { string path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid); if (!File.Exists(path)) return; StudyXml studyXml = StorageLocation.LoadStudyXml(); DicomFile file = new DicomFile(path); file.Load(DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default); // no need to load pixel data because we're deleting the file #if DEBUG int originalInstanceCountInXml = studyXml.NumberOfStudyRelatedInstances; int originalStudyInstanceCount = Study.NumberOfStudyRelatedInstances; int originalSeriesInstanceCount = Study.Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances; #endif using (ServerCommandProcessor processor = new ServerCommandProcessor("Delete Existing Image")) { processor.AddCommand(new FileDeleteCommand(path, true)); processor.AddCommand(new RemoveInstanceFromStudyXmlCommand(StorageLocation, studyXml, file)); processor.AddCommand(new UpdateInstanceCountCommand(StorageLocation, file)); if (!processor.Execute()) { throw new ApplicationException(String.Format("Unable to remove existing image {0}", file.Filename), processor.FailureException); } } #if DEBUG Debug.Assert(!File.Exists(path)); Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCountInXml - 1); Debug.Assert(Study.Load(Study.Key).NumberOfStudyRelatedInstances == originalStudyInstanceCount - 1); Debug.Assert(Study.Load(Study.Key).Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances == originalSeriesInstanceCount - 1); #endif }
protected void RemoveFilesystem() { string path = StorageLocation.GetStudyPath(); using(var processor = new ServerCommandProcessor("Delete Filesystems Processor")) { processor.AddCommand(new DeleteDirectoryCommand(path, true)); if (_relatedDirectories!=null) { foreach (DirectoryInfo dir in _relatedDirectories) { processor.AddCommand(new DeleteDirectoryCommand(dir.FullName, true) { Log = true }); } } if (!processor.Execute()) { throw new ApplicationException( String.Format("Unexpected error when deleting study folders: {0}", processor.FailureReason), processor.FailureException); } } DirectoryUtility.DeleteIfEmpty(Path.GetDirectoryName(path)); }
protected void ProcessFile(Model.WorkQueue item, WorkQueueUid sop, string path, StudyXml studyXml, IDicomCodecFactory theCodecFactory) { DicomFile file; _instanceStats = new CompressInstanceStatistics(); _instanceStats.ProcessTime.Start(); // Use the command processor for rollback capabilities. using (ServerCommandProcessor processor = new ServerCommandProcessor("Processing WorkQueue Compress DICOM File")) { string modality = String.Empty; try { file = new DicomFile(path); _instanceStats.FileLoadTime.Start(); file.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default); _instanceStats.FileLoadTime.End(); modality = file.DataSet[DicomTags.Modality].GetString(0, String.Empty); FileInfo fileInfo = new FileInfo(path); _instanceStats.FileSize = (ulong)fileInfo.Length; // Get the Patients Name for processing purposes. String patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, ""); if (file.TransferSyntax.Equals(theCodecFactory.CodecTransferSyntax)) { // Delete the WorkQueueUid item processor.AddCommand(new DeleteWorkQueueUidCommand(sop)); // Do the actual processing if (!processor.Execute()) { Platform.Log(LogLevel.Warn, "Failure deleting WorkQueueUid: {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid); Platform.Log(LogLevel.Warn, "Compression file that failed: {0}", file.Filename); } else { Platform.Log(LogLevel.Warn, "Skipping compression of SOP {0}. Its current transfer syntax is {1}", file.MediaStorageSopInstanceUid, file.TransferSyntax.Name); } } else { IDicomCodec codec = theCodecFactory.GetDicomCodec(); // Create a context for applying actions from the rules engine var context = new ServerActionContext(file, StorageLocation.FilesystemKey, ServerPartition, item.StudyStorageKey); context.CommandProcessor = processor; var parms = theCodecFactory.GetCodecParameters(item.Data); var compressCommand = new DicomCompressCommand(context.Message, theCodecFactory.CodecTransferSyntax, codec, parms); processor.AddCommand(compressCommand); var save = new SaveDicomFileCommand(file.Filename, file, false); processor.AddCommand(save); // Update the StudyStream object, must be done after compression // and after the compressed image has been successfully saved var insertStudyXmlCommand = new UpdateStudyXmlCommand(file, studyXml, StorageLocation); processor.AddCommand(insertStudyXmlCommand); // Delete the WorkQueueUid item processor.AddCommand(new DeleteWorkQueueUidCommand(sop)); // Do the actual processing if (!processor.Execute()) { _instanceStats.CompressTime.Add(compressCommand.CompressTime); Platform.Log(LogLevel.Error, "Failure compressing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid); Platform.Log(LogLevel.Error, "Compression file that failed: {0}", file.Filename); throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException); } _instanceStats.CompressTime.Add(compressCommand.CompressTime); Platform.Log(ServerPlatform.InstanceLogLevel, "Compress SOP: {0} for Patient {1}", file.MediaStorageSopInstanceUid, patientsName); } } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", processor.Description); processor.Rollback(); throw; } finally { _instanceStats.ProcessTime.End(); _studyStats.AddSubStats(_instanceStats); _studyStats.StudyInstanceUid = StorageLocation.StudyInstanceUid; if (!String.IsNullOrEmpty(modality)) _studyStats.Modality = modality; // Update the statistics _studyStats.NumInstances++; } } }
/// <summary> /// Creates an instance of <see cref="SopProcessingContext"/> /// </summary> /// <param name="commandProcessor">The <see cref="ServerCommandProcessor"/> used in the context</param> /// <param name="studyLocation">The <see cref="StudyStorageLocation"/> of the study being processed</param> /// <param name="uidGroup">A String value representing the group of SOP instances which are being processed.</param> public SopProcessingContext(ServerCommandProcessor commandProcessor, StudyStorageLocation studyLocation, string uidGroup) { _commandProcessor = commandProcessor; _studyLocation = studyLocation; _group = uidGroup; }
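// Usage sketch (illustrative; mirrors HandleDuplicate and CreateDuplicateSIQEntry in
// this section): a context is created per SOP instance and handed, together with the
// parsed file, to the duplicate processor helper. The local variables are assumed to
// be in scope.
var sopProcessingContext = new SopProcessingContext(commandProcessor, studyLocation, uidGroup);
DicomProcessingResult result = DuplicateSopProcessorHelper.Process(sopProcessingContext, file);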
/// <summary> /// Create Duplicate SIQ Entry /// </summary> /// <param name="file"></param> /// <param name="location"></param> /// <param name="sourcePath"></param> /// <param name="queue"></param> /// <param name="uid"></param> public static void CreateDuplicateSIQEntry(DicomFile file, StudyStorageLocation location, string sourcePath, WorkQueue queue, WorkQueueUid uid) { Platform.Log(LogLevel.Info, "Creating Work Queue Entry for duplicate..."); String uidGroup = queue.GroupID ?? queue.GetKey().Key.ToString(); using (var commandProcessor = new ServerCommandProcessor("Insert Work Queue entry for duplicate")) { commandProcessor.AddCommand(new FileDeleteCommand(sourcePath, true)); var sopProcessingContext = new SopProcessingContext(commandProcessor, location, uidGroup); DicomProcessingResult result = Process(sopProcessingContext, file); if (!result.Successful) { FailUid(uid, true); return; } commandProcessor.AddCommand(new DeleteWorkQueueUidCommand(uid)); if (!commandProcessor.Execute()) { Platform.Log(LogLevel.Error, "Unexpected error when creating duplicate study integrity queue entry: {0}", commandProcessor.FailureReason); FailUid(uid, true); } } }
private void SaveFile(DicomFile file) { String seriesInstanceUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty); String sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty); String destPath = _oldStudyLocation.FilesystemPath; using (ServerCommandProcessor filesystemUpdateProcessor = new ServerCommandProcessor("Update Study")) { filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath)); destPath = Path.Combine(destPath, _partition.PartitionFolder); filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath)); destPath = Path.Combine(destPath, _oldStudyFolder); filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath)); destPath = Path.Combine(destPath, _newStudyInstanceUid); filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath)); destPath = Path.Combine(destPath, seriesInstanceUid); filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath)); destPath = Path.Combine(destPath, sopInstanceUid); destPath += ServerPlatform.DicomFileExtension; // Overwrite the prior file SaveDicomFileCommand saveCommand = new SaveDicomFileCommand(destPath, file, false); filesystemUpdateProcessor.AddCommand(saveCommand); if (_rulesEngine != null) { ServerActionContext context = new ServerActionContext(file, _oldStudyLocation.FilesystemKey, _partition, _oldStudyLocation.Key) {CommandProcessor = filesystemUpdateProcessor}; _rulesEngine.Execute(context); } if (!filesystemUpdateProcessor.Execute()) { throw new ApplicationException(String.Format("Unable to update image {0} : {1}", file.Filename, filesystemUpdateProcessor.FailureReason)); } } }
private void UpdateStudyOrDuplicates() { // StorageLocation object must be reloaded if we are overwriting the study // with info in the duplicates. bool needReload = false; switch (_processDuplicateEntry.QueueData.Action) { case ProcessDuplicateAction.OverwriteUseDuplicates: if (_processDuplicateEntry.QueueData.State.ExistingStudyUpdated) Platform.Log(LogLevel.Info, "Existing Study has been updated before"); else { Platform.Log(LogLevel.Info, "Update Existing Study w/ Duplicate Info"); _studyUpdateCommands = BuildUpdateStudyCommandsFromDuplicate(); using (ServerCommandProcessor processor = new ServerCommandProcessor("Update Existing Study w/ Duplicate Info")) { processor.AddCommand(new UpdateStudyCommand(ServerPartition, StorageLocation, _studyUpdateCommands, ServerRuleApplyTimeEnum.SopProcessed)); if (!processor.Execute()) { throw new ApplicationException(processor.FailureReason, processor.FailureException); } needReload = true; _processDuplicateEntry.QueueData.State.ExistingStudyUpdated = true; } } break; case ProcessDuplicateAction.OverwriteUseExisting: ImageUpdateCommandBuilder commandBuilder = new ImageUpdateCommandBuilder(); _duplicateUpdateCommands = new List<BaseImageLevelUpdateCommand>(); _duplicateUpdateCommands.AddRange(commandBuilder.BuildCommands<StudyMatchingMap>(StorageLocation)); PrintCommands(_duplicateUpdateCommands); break; } if (needReload) { StudyStorageLocation updatedStorageLocation; // NOTE: Make sure we are loading the storage location from the database instead of the cache. if (!FilesystemMonitor.Instance.GetWritableStudyStorageLocation(WorkQueueItem.StudyStorageKey, out updatedStorageLocation)) { // this is odd.. we just updated it and now it's no longer writable? throw new ApplicationException("Filesystem is not writable"); } StorageLocation = updatedStorageLocation; } }
private bool UpdateNameBasedOnTheStudy(DicomFile file) { bool updated = false; string originalPatientsNameInFile = file.DataSet[DicomTags.PatientsName].ToString(); if (_theStudy == null) { return false; } StudyComparer comparer = new StudyComparer(); ServerPartition partition = ServerPartitionMonitor.Instance.FindPartition(_theStudy.ServerPartitionKey); DifferenceCollection list = comparer.Compare(file, _theStudy, partition.GetComparisonOptions()); if (list.Count == 1) { ComparisionDifference different = list[0]; if (different.DicomTag.TagValue == DicomTags.PatientsName) { if (DicomNameUtils.LookLikeSameNames(originalPatientsNameInFile, _theStudy.PatientsName)) { using (ServerCommandProcessor processor = new ServerCommandProcessor("Update Patient's Name")) { SetTagCommand command = new SetTagCommand(file, DicomTags.PatientsName, originalPatientsNameInFile, _theStudy.PatientsName); processor.AddCommand(command); if (!processor.Execute()) { throw new ApplicationException(String.Format("AUTO-CORRECTION Failed: Unable to correct the patient's name in the image. Reason: {0}", processor.FailureReason), processor.FailureException); } updated = true; } } } } return updated; }
private void DeleteDuplicate(WorkQueueUid uid) { using (ServerCommandProcessor processor = new ServerCommandProcessor("Delete Received Duplicate")) { FileInfo duplicateFile = GetDuplicateSopFile(uid); processor.AddCommand(new FileDeleteCommand(duplicateFile.FullName,true)); processor.AddCommand(new DeleteWorkQueueUidCommand(uid)); if (!processor.Execute()) { throw new ApplicationException(processor.FailureReason, processor.FailureException); } Platform.Log(ServerPlatform.InstanceLogLevel, "Discard duplicate SOP {0} in {1}", uid.SopInstanceUid, duplicateFile.FullName); } }
/// <summary> /// Archive the specified <see cref="ArchiveQueue"/> item. /// </summary> /// <param name="queueItem">The ArchiveQueue item to archive.</param> public void Run(ArchiveQueue queueItem) { using (ArchiveProcessorContext executionContext = new ArchiveProcessorContext(queueItem)) { try { if (!GetStudyStorageLocation(queueItem)) { Platform.Log(LogLevel.Error, "Unable to find readable study storage location for archival queue request {0}. Delaying request.", queueItem.Key); queueItem.FailureDescription = "Unable to find readable study storage location for archival queue request."; _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Pending, Platform.Time.AddMinutes(2)); return; } // First, check to see if we can lock the study; if not, just reschedule the queue entry. if (!_storageLocation.QueueStudyStateEnum.Equals(QueueStudyStateEnum.Idle)) { Platform.Log(LogLevel.Info, "Study {0} on partition {1} is currently locked, delaying archival.", _storageLocation.StudyInstanceUid, _hsmArchive.ServerPartition.Description); queueItem.FailureDescription = "Study is currently locked, delaying archival."; _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Pending, Platform.Time.AddMinutes(2)); return; } StudyIntegrityValidator validator = new StudyIntegrityValidator(); validator.ValidateStudyState("Archive", _storageLocation, StudyIntegrityValidationModes.Default); using (IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush)) { ILockStudy studyLock = update.GetBroker<ILockStudy>(); LockStudyParameters parms = new LockStudyParameters { StudyStorageKey = queueItem.StudyStorageKey, QueueStudyStateEnum = QueueStudyStateEnum.ArchiveScheduled }; bool retVal = studyLock.Execute(parms); if (!parms.Successful || !retVal) { Platform.Log(LogLevel.Info, "Study {0} on partition {1} failed to lock, delaying archival.", _storageLocation.StudyInstanceUid, _hsmArchive.ServerPartition.Description); queueItem.FailureDescription = "Study failed to lock, delaying archival."; _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Pending, Platform.Time.AddMinutes(2)); return; } update.Commit(); } string studyFolder = _storageLocation.GetStudyPath(); string studyXmlFile = _storageLocation.GetStudyXmlPath(); // Load the study Xml file; it is used to generate the list of DICOM files to archive. LoadStudyXml(studyXmlFile); DicomFile file = LoadFileFromStudyXml(); string patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, string.Empty); string patientId = file.DataSet[DicomTags.PatientId].GetString(0, string.Empty); string accessionNumber = file.DataSet[DicomTags.AccessionNumber].GetString(0, string.Empty); Platform.Log(LogLevel.Info, "Starting archival of study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} on archive {5}", _storageLocation.StudyInstanceUid, patientsName, patientId, accessionNumber, _hsmArchive.ServerPartition.Description, _hsmArchive.PartitionArchive.Description); // Use the command processor to do the archival.
using (ServerCommandProcessor commandProcessor = new ServerCommandProcessor("Archive")) { _archiveXml = new XmlDocument(); // Create the study date folder string zipFilename = Path.Combine(_hsmArchive.HsmPath, _storageLocation.StudyFolder); commandProcessor.AddCommand(new CreateDirectoryCommand(zipFilename)); // Create a folder for the study zipFilename = Path.Combine(zipFilename, _storageLocation.StudyInstanceUid); commandProcessor.AddCommand(new CreateDirectoryCommand(zipFilename)); // Save the archive data in the study folder, based on a filename with a date / time stamp string filename = String.Format("{0}.zip", Platform.Time.ToString("yyyy-MM-dd-HHmm")); zipFilename = Path.Combine(zipFilename, filename); // Create the Xml data to store in the ArchiveStudyStorage table telling // where the archived study is located. XmlElement hsmArchiveElement = _archiveXml.CreateElement("HsmArchive"); _archiveXml.AppendChild(hsmArchiveElement); XmlElement studyFolderElement = _archiveXml.CreateElement("StudyFolder"); hsmArchiveElement.AppendChild(studyFolderElement); studyFolderElement.InnerText = _storageLocation.StudyFolder; XmlElement filenameElement = _archiveXml.CreateElement("Filename"); hsmArchiveElement.AppendChild(filenameElement); filenameElement.InnerText = filename; XmlElement studyInstanceUidElement = _archiveXml.CreateElement("Uid"); hsmArchiveElement.AppendChild(studyInstanceUidElement); studyInstanceUidElement.InnerText = _storageLocation.StudyInstanceUid; // Create the Zip file commandProcessor.AddCommand( new CreateStudyZipCommand(zipFilename, _studyXml, studyFolder, executionContext.TempDirectory)); // Update the database. commandProcessor.AddCommand(new InsertArchiveStudyStorageCommand(queueItem.StudyStorageKey, queueItem.PartitionArchiveKey, queueItem.GetKey(), _storageLocation.ServerTransferSyntaxKey, _archiveXml)); StudyRulesEngine studyEngine = new StudyRulesEngine(_storageLocation, _hsmArchive.ServerPartition, _studyXml); studyEngine.Apply(ServerRuleApplyTimeEnum.StudyArchived, commandProcessor); if (!commandProcessor.Execute()) { Platform.Log(LogLevel.Error, "Unexpected failure archiving study ({0}) to archive {1}: {2}, zip filename: {3}", _storageLocation.StudyInstanceUid, _hsmArchive.PartitionArchive.Description, commandProcessor.FailureReason, zipFilename); queueItem.FailureDescription = commandProcessor.FailureReason; _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Failed, Platform.Time); } else Platform.Log(LogLevel.Info, "Successfully archived study {0} on {1} to zip {2}", _storageLocation.StudyInstanceUid, _hsmArchive.PartitionArchive.Description, zipFilename); // Log the current FilesystemQueue settings _storageLocation.LogFilesystemQueue(); } } catch (StudyIntegrityValidationFailure ex) { StringBuilder error = new StringBuilder(); error.AppendLine(String.Format("Partition : {0}", ex.ValidationStudyInfo.ServerAE)); error.AppendLine(String.Format("Patient : {0}", ex.ValidationStudyInfo.PatientsName)); error.AppendLine(String.Format("Study Uid : {0}", ex.ValidationStudyInfo.StudyInstaneUid)); error.AppendLine(String.Format("Accession# : {0}", ex.ValidationStudyInfo.AccessionNumber)); error.AppendLine(String.Format("Study Date : {0}", ex.ValidationStudyInfo.StudyDate)); queueItem.FailureDescription = error.ToString(); _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Failed, Platform.Time); } catch (Exception e) { String msg = String.Format("Unexpected exception archiving study: {0} on {1}: {2}", _storageLocation.StudyInstanceUid, 
_hsmArchive.PartitionArchive.Description, e.Message); Platform.Log(LogLevel.Error, e, msg); queueItem.FailureDescription = msg; _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Failed, Platform.Time); } finally { // Unlock the Queue Entry using (IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush)) { ILockStudy studyLock = update.GetBroker<ILockStudy>(); LockStudyParameters parms = new LockStudyParameters { StudyStorageKey = queueItem.StudyStorageKey, QueueStudyStateEnum = QueueStudyStateEnum.Idle }; bool retVal = studyLock.Execute(parms); if (!parms.Successful || !retVal) { Platform.Log(LogLevel.Info, "Study {0} on partition {1} failed to unlock.", _storageLocation.StudyInstanceUid, _hsmArchive.ServerPartition.Description); } update.Commit(); } } } }
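// For reference, the ArchiveStudyStorage XML assembled above has this shape
// (element names taken from the code; values are illustrative):
//
//   <HsmArchive>
//     <StudyFolder>20110512</StudyFolder>
//     <Filename>2011-05-12-0930.zip</Filename>
//     <Uid>1.2.840.113619.2.55.3.1234567890.123</Uid>
//   </HsmArchive>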
/// <summary> /// Removes all WorkQueueUids from the database and deletes the corresponding DICOM files from the filesystem. /// </summary> private void ProcessWorkQueueUids() { if (Study == null) Platform.Log(LogLevel.Info, "Begin StudyProcess Cleanup (Study has not been created): Attempt #{0}. {1} unprocessed files will be removed", WorkQueueItem.FailureCount + 1, WorkQueueUidList.Count); else Platform.Log(LogLevel.Info, "Begin StudyProcess Cleanup for study {0}, Patient {1} (PatientId:{2} A#:{3}) on Partition {4}. Attempt #{5}. {6} unprocessed files will be removed", Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber, ServerPartition.Description, WorkQueueItem.FailureCount + 1, WorkQueueUidList.Count ); foreach (WorkQueueUid sop in WorkQueueUidList) { string path = GetFileStoredPath(sop); Platform.Log(LogLevel.Info, "Cleaning up {0}", path); using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", sop.SopInstanceUid))) { // delete the file FileDeleteCommand deleteFile = new FileDeleteCommand(path, true); processor.AddCommand(deleteFile); // delete the WorkQueueUID from the database DeleteWorkQueueUidCommand deleteUid = new DeleteWorkQueueUidCommand(sop); processor.AddCommand(deleteUid); try { // delete the directory (if empty) var fileInfo = new FileInfo(path); ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand deleteDir = new ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand(fileInfo.Directory.FullName, false, true); processor.AddCommand(deleteDir); } catch (DirectoryNotFoundException) { // ignore } if (!processor.Execute()) { throw new Exception(String.Format("Unable to delete SOP {0}", sop.SopInstanceUid), processor.FailureException); } } // Delete the base directory if it's empty var baseDir = GetBaseDirectory(sop); if (Directory.Exists(baseDir)) { using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", sop.SopInstanceUid))) { ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand deleteDir = new ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand(baseDir, false, true); processor.AddCommand(deleteDir); if (!processor.Execute()) { throw new Exception(String.Format("Unable to delete {0}", baseDir), processor.FailureException); } } } } }
/// <summary> /// Inserts a <see cref="StudyIntegrityQueue"/> entry for manual reconciliation. /// </summary> /// <param name="file">The DICOM file that needs to be reconciled.</param> /// <param name="reason">The type of <see cref="StudyIntegrityQueue"/> entry to be inserted.</param> /// <param name="uid">A UID to delete on insert.</param> /// <remarks> /// A copy of the DICOM file will be stored in a special folder allocated for /// reconciliation purposes. The caller is responsible for managing the original copy. /// </remarks> public void ScheduleReconcile(DicomFile file, StudyIntegrityReasonEnum reason, WorkQueueUid uid) { Platform.CheckForNullReference(_context.StudyLocation, "_context.StudyLocation"); Platform.Log(LogLevel.Info, "Scheduling new manual reconciliation for SOP {0}", file.MediaStorageSopInstanceUid); ServerFilesystemInfo fs = FilesystemMonitor.Instance.GetFilesystemInfo(_context.StudyLocation.FilesystemKey); Platform.CheckForNullReference(fs, "fs"); ReconcileStorage reconcileStorage = new ReconcileStorage(_context.StudyLocation, _context.Group); using(ServerCommandProcessor processor = new ServerCommandProcessor("Schedule Manual Reconciliation")) { string path = reconcileStorage.GetSopInstancePath(file.DataSet[DicomTags.SopInstanceUid].ToString()); DirectoryInfo dir = new DirectoryInfo(path); if (dir.Parent != null) { CreateDirectoryCommand mkdir = new CreateDirectoryCommand(dir.Parent.FullName); processor.AddCommand(mkdir); } SaveDicomFileCommand saveFileCommand = new SaveDicomFileCommand(path, file, true); processor.AddCommand(saveFileCommand); InsertSIQCommand updateStudyCommand = new InsertSIQCommand(_context.StudyLocation, reason, file, _context.Group, reconcileStorage); processor.AddCommand(updateStudyCommand); if (uid != null) processor.AddCommand(new DeleteWorkQueueUidCommand(uid)); if (!processor.Execute()) { throw new ApplicationException(String.Format("Unable to schedule image reconciliation : {0}", processor.FailureReason), processor.FailureException); } } }
/// <summary> /// Perform the edit. /// </summary> /// <param name="actionXml">A serialized XML representation of <see cref="SetTagCommand"/> objects</param> /// <returns></returns> public bool Edit(XmlElement actionXml) { Platform.Log(LogLevel.Info, "Starting Edit of study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}", Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber, ServerPartition.Description); LoadExtensions(); EditStudyWorkQueueDataParser parser = new EditStudyWorkQueueDataParser(); EditStudyWorkQueueData data = parser.Parse(actionXml); using (ServerCommandProcessor processor = new ServerCommandProcessor("Web Edit Study")) { // Convert UpdateItem in the request into BaseImageLevelUpdateCommand List<BaseImageLevelUpdateCommand> updateCommands = null; if (data != null) { updateCommands = CollectionUtils.Map<Edit.UpdateItem, BaseImageLevelUpdateCommand>( data.EditRequest.UpdateEntries, delegate(Edit.UpdateItem item) { // Note: For edit, we assume each UpdateItem is equivalent to SetTagCommand return new SetTagCommand(item.DicomTag.TagValue, item.OriginalValue, item.Value); } ); } UpdateStudyCommand updateStudyCommand = new UpdateStudyCommand(ServerPartition, StorageLocation, updateCommands, ServerRuleApplyTimeEnum.SopEdited); processor.AddCommand(updateStudyCommand); // Note, this command will only insert the ArchiveQueue command if a delete doesn't exist processor.AddCommand(new InsertArchiveQueueCommand(ServerPartition.Key, StorageLocation.Key)); var context = new WebEditStudyContext { CommandProcessor = processor, EditType = data.EditRequest.EditType, OriginalStudyStorageLocation = StorageLocation, EditCommands = updateCommands, OriginalStudy = Study, OrginalPatient = Patient, UserId = data.EditRequest.UserId, Reason = data.EditRequest.Reason }; OnStudyUpdating(context); if (!processor.Execute()) { Platform.Log(LogLevel.Error, processor.FailureException, "Unexpected failure editing study: {0}", processor.FailureReason); FailureReason = processor.FailureReason; return false; } // reload the StudyStorageLocation NewStorageLocation = StudyStorageLocation.FindStorageLocations(StorageLocation.StudyStorage)[0]; context.NewStudystorageLocation = NewStorageLocation; OnStudyUpdated(context); if (updateStudyCommand.Statistics != null) StatisticsLogger.Log(LogLevel.Info, updateStudyCommand.Statistics); return true; } }
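// Illustrative mapping (hypothetical values, not from the original source): an
// UpdateItem carrying
//   DicomTag = PatientsName, OriginalValue = "DOE^JANE", Value = "DOE^JANE^A"
// becomes
//   new SetTagCommand(DicomTags.PatientsName, "DOE^JANE", "DOE^JANE^A")
// which the UpdateStudyCommand above then applies to every image in the study.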
private static void UpdateUidMap(StudyStorageLocation dest, UidMapper uidMapper) { using (ServerCommandProcessor processor = new ServerCommandProcessor("Update UID Mapping Processor")) { processor.AddCommand(new SaveUidMapXmlCommand(dest, uidMapper)); if (!processor.Execute()) { throw new ApplicationException(String.Format("AUTO-RECONCILE Failed: Unable to update uid mapping. Reason: {0}", processor.FailureReason), processor.FailureException); } } }
private void UpdateExistingStudy() { Platform.Log(LogLevel.Info, "Updating existing study..."); using(ServerCommandProcessor updateProcessor = new ServerCommandProcessor("Update Study")) { UpdateStudyCommand studyUpdateCommand = new UpdateStudyCommand(Context.Partition, _destinationStudyStorage, _commands, ServerRuleApplyTimeEnum.SopProcessed); updateProcessor.AddCommand(studyUpdateCommand); if (!updateProcessor.Execute()) { throw new ApplicationException( String.Format("Unable to update existing study: {0}", updateProcessor.FailureReason)); } } }
/// <summary> /// Imports the specified <see cref="DicomMessageBase"/> object into the system. /// The object will be inserted into the <see cref="WorkQueue"/> for processing and /// if it's a duplicate, proper checks will be done and depending on the policy, it will be /// ignored, rejected or inserted into the <see cref="StudyIntegrityQueue"/> for manual intervention. /// </summary> /// <param name="message">The DICOM object to be imported.</param> /// <returns>An instance of <see cref="DicomProcessingResult"/> that describes the result of the processing.</returns> /// <exception cref="DicomDataException">Thrown when the DICOM object contains invalid data</exception> public DicomProcessingResult Import(DicomMessageBase message) { Platform.CheckForNullReference(message, "message"); String studyInstanceUid = message.DataSet[DicomTags.StudyInstanceUid].GetString(0, string.Empty); String seriesInstanceUid = message.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty); String sopInstanceUid = message.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty); String accessionNumber = message.DataSet[DicomTags.AccessionNumber].GetString(0, string.Empty); String patientsName = message.DataSet[DicomTags.PatientsName].GetString(0, string.Empty); DicomFile file = null; // Scrub the name for invalid characters. string newName = XmlUtils.XmlCharacterScrub(patientsName); if (!newName.Equals(patientsName)) message.DataSet[DicomTags.PatientsName].SetStringValue(newName); var result = new DicomProcessingResult { Successful = true, StudyInstanceUid = studyInstanceUid, SeriesInstanceUid = seriesInstanceUid, SopInstanceUid = sopInstanceUid, AccessionNumber = accessionNumber }; try { Validate(message); } catch (DicomDataException e) { result.SetError(DicomStatuses.ProcessingFailure, e.Message); return result; } // Use the command processor for rollback capabilities. using (var commandProcessor = new ServerCommandProcessor(String.Format("Processing Sop Instance {0}", sopInstanceUid))) { try { string failureMessage; StudyStorageLocation studyLocation = GetWritableOnlineStorage(message); // GetWritableOnlineStorage should throw an exception if the study location cannot be found. Platform.CheckForNullReference(studyLocation, "studyLocation"); if (!studyLocation.QueueStudyStateEnum.Equals(QueueStudyStateEnum.Idle) && (!studyLocation.QueueStudyStateEnum.Equals(QueueStudyStateEnum.ProcessingScheduled))) { failureMessage = String.Format("Study {0} on partition {1} is being processed: {2}, can't accept new images.", studyLocation.StudyInstanceUid, _context.Partition.Description, studyLocation.QueueStudyStateEnum.Description); result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage); return result; } if (studyLocation.StudyStatusEnum.Equals(StudyStatusEnum.OnlineLossy)) { if (studyLocation.IsLatestArchiveLossless) { result.DicomStatus = DicomStatuses.StorageStorageOutOfResources; failureMessage = String.Format("Study {0} on partition {1} can't accept new images due to lossy compression of the study. Restoring study.", studyLocation.StudyInstanceUid, _context.Partition.Description); Platform.Log(LogLevel.Error, failureMessage); if (ServerHelper.InsertRestoreRequest(studyLocation) == null) { Platform.Log(LogLevel.Warn, "Unable to insert Restore Request for Study"); } result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage); result.RestoreRequested = true; return result; } } String path = studyLocation.FilesystemPath; const string extension = null; String finalDest = studyLocation.GetSopInstancePath(seriesInstanceUid, sopInstanceUid); file = ConvertToDicomFile(message, finalDest, _context.SourceAE); if (HasUnprocessedCopy(studyLocation.Key, seriesInstanceUid, sopInstanceUid)) { var accept = false; // This is a special case: #10569 // Allow user to revive an orphaned study by reprocessing the files found in the filesystem if (File.Exists(finalDest)) { accept = DuplicatePolicy.IsParitionDuplicatePolicyOverridden(studyLocation); } if (!accept) { failureMessage = string.Format("Another copy of the SOP Instance was received but has not been processed: {0}", sopInstanceUid); result.SetError(DicomStatuses.DuplicateSOPInstance, failureMessage); return result; } } if (File.Exists(finalDest)) { result = HandleDuplicate(sopInstanceUid, studyLocation, commandProcessor, file); if (!result.Successful) return result; } else { HandleNonDuplicate(seriesInstanceUid, sopInstanceUid, studyLocation, commandProcessor, file, path, false, extension); } if (commandProcessor.Execute()) { result.DicomStatus = DicomStatuses.Success; } else { failureMessage = String.Format("Failure processing message: {0}. Sending failure status.", commandProcessor.FailureReason); result.SetError(DicomStatuses.ProcessingFailure, failureMessage); // processor already rolled back return result; } } catch(NoWritableFilesystemException) { String failureMessage = String.Format("Unable to process image, no writable filesystem found for SOP Instance UID {0}.", sopInstanceUid); commandProcessor.Rollback(); result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage); } catch(StudyIsNearlineException e) { String failureMessage = e.RestoreRequested ? String.Format("{0}. Restore has been requested.", e.Message) : e.Message; Platform.Log(LogLevel.Error, failureMessage); commandProcessor.Rollback(); result.SetError(DicomStatuses.ProcessingFailure, failureMessage); } catch (FilesystemNotWritableException) { commandProcessor.Rollback(); string folder; if (!FilesystemMonitor.Instance.GetWriteableIncomingFolder(_context.Partition, out folder)) { String failureMessage = String.Format("Unable to process image, study storage location is missing or not writeable: {0}.", sopInstanceUid); result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage); return result; } if (file == null) file = ConvertToDicomFile(message, string.Empty, _context.SourceAE); if (!SaveToFolder(folder, sopInstanceUid, studyInstanceUid, file)) { String failureMessage = String.Format("Study storage location not writeable and no writeable incoming folder: {0}.", sopInstanceUid); result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage); return result; } Platform.Log(LogLevel.Info, "Saved existing SOP without writeable storage location to {0} folder: {1}", FilesystemMonitor.ImportDirectorySuffix, sopInstanceUid); result.DicomStatus = DicomStatuses.Success; return result; } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", commandProcessor.Description); commandProcessor.Rollback(); result.SetError(result.DicomStatus ?? DicomStatuses.ProcessingFailure, e.Message); } } return result; }
private static void HandleNonDuplicate(string seriesInstanceUid, string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomFile file, string path, bool dupImage, string extension) { commandProcessor.AddCommand(new CreateDirectoryCommand(path)); path = Path.Combine(path, studyLocation.PartitionFolder); commandProcessor.AddCommand(new CreateDirectoryCommand(path)); path = Path.Combine(path, studyLocation.StudyFolder); commandProcessor.AddCommand(new CreateDirectoryCommand(path)); path = Path.Combine(path, studyLocation.StudyInstanceUid); commandProcessor.AddCommand(new CreateDirectoryCommand(path)); path = Path.Combine(path, seriesInstanceUid); commandProcessor.AddCommand(new CreateDirectoryCommand(path)); path = Path.Combine(path, sopInstanceUid); path += ServerPlatform.DicomFileExtension; commandProcessor.AddCommand(new SaveDicomFileCommand(path, file, true)); commandProcessor.AddCommand( new UpdateWorkQueueCommand(file, studyLocation, dupImage)); #region SPECIAL CODE FOR TESTING if (Diagnostics.Settings.SimulateFileCorruption) { commandProcessor.AddCommand(new CorruptDicomFileCommand(path)); } #endregion }
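// The chain of CreateDirectoryCommands above yields a layout like the following
// (values are illustrative):
//   <FilesystemPath>\<PartitionFolder>\<StudyFolder>\<StudyInstanceUid>\<SeriesInstanceUid>\<SopInstanceUid>.dcm
//   e.g. D:\FS1\Partition1\20110512\1.2.840.1...\1.2.840.2...\1.2.840.3....dcm
// Each directory level is a separate command so that a failure at any level rolls
// back cleanly through the command processor.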
private static bool SaveToFolder(string folder, string sopInstanceUid, string studyInstanceUid, DicomFile file) { using (ServerCommandProcessor commandProcessor = new ServerCommandProcessor(String.Format("Saving Sop Instance to Incoming {0}", sopInstanceUid))) { string path = Path.Combine(folder, studyInstanceUid); commandProcessor.AddCommand(new CreateDirectoryCommand(path)); path = Path.Combine(path, sopInstanceUid); path += ServerPlatform.DicomFileExtension; if (File.Exists(path)) return false; commandProcessor.AddCommand(new SaveDicomFileCommand(path, file, true)); return commandProcessor.Execute(); } }
private void ProcessSeriesLevelDelete(Model.WorkQueue item) { // ensure the Study is loaded. Study study = StorageLocation.Study; Platform.CheckForNullReference(study, "Study record doesn't exist"); Platform.Log(LogLevel.Info, "Processing Series Level Deletion for Study {0}, A#: {1}", study.StudyInstanceUid, study.AccessionNumber); _seriesToDelete = new List<Series>(); bool completed = false; try { // Load the list of Series to be deleted from the WorkQueueUid LoadUids(item); // Go through the list of series and add commands // to delete each of them. It's all or nothing. using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting Series from study {0}, A#:{1}, Patient: {2}, ID:{3}", study.StudyInstanceUid, study.AccessionNumber, study.PatientsName, study.PatientId))) { StudyXml studyXml = StorageLocation.LoadStudyXml(); IDictionary<string, Series> existingSeries = StorageLocation.Study.Series; // Add commands to delete the folders and update the xml foreach (WorkQueueUid uid in WorkQueueUidList) { // Delete from study XML if (studyXml.Contains(uid.SeriesInstanceUid)) { var xmlUpdate = new RemoveSeriesFromStudyXml(studyXml, uid.SeriesInstanceUid); processor.AddCommand(xmlUpdate); } // Delete from filesystem. Note: DeleteDirectoryCommand doesn't throw an exception if the folder doesn't exist string path = StorageLocation.GetSeriesPath(uid.SeriesInstanceUid); if (Directory.Exists(path)) { var delDir = new DeleteDirectoryCommand(path, true); processor.AddCommand(delDir); } } // flush the updated xml to disk processor.AddCommand(new SaveXmlCommand(studyXml, StorageLocation)); // Update the db.. NOTE: these commands are executed at the end. foreach (WorkQueueUid uid in WorkQueueUidList) { // Delete from DB WorkQueueUid queueUid = uid; Series theSeries = existingSeries[queueUid.SeriesInstanceUid]; if (theSeries != null) { _seriesToDelete.Add(theSeries); var delSeries = new DeleteSeriesFromDBCommand(StorageLocation, theSeries); processor.AddCommand(delSeries); delSeries.Executing += DeleteSeriesFromDbExecuting; } else { // Series doesn't exist Platform.Log(LogLevel.Info, "Series {0} is invalid or no longer exists", uid.SeriesInstanceUid); } // The WorkQueueUid must be cleared before the entry can be removed from the queue var deleteUid = new DeleteWorkQueueUidCommand(uid); processor.AddCommand(deleteUid); // Force a re-archival if necessary processor.AddCommand(new InsertArchiveQueueCommand(item.ServerPartitionKey, item.StudyStorageKey)); } if (!processor.Execute()) throw new ApplicationException( String.Format("Error occurred when deleting series from Study {0}, A#: {1}", study.StudyInstanceUid, study.AccessionNumber), processor.FailureException); else { foreach (Series series in _seriesToDelete) { OnSeriesDeleted(series); } } } completed = true; } finally { if (completed) { OnCompleted(); PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState); } else { PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None); } } }
private void InsertInstance(DicomFile file, StudyXml stream, WorkQueueUid uid, string deleteFile) { using (ServerCommandProcessor processor = new ServerCommandProcessor("Processing WorkQueue DICOM file")) { EventsHelper.Fire(OnInsertingSop, this, new SopInsertingEventArgs {Processor = processor }); InsertInstanceCommand insertInstanceCommand = null; InsertStudyXmlCommand insertStudyXmlCommand = null; String patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, String.Empty); _modality = file.DataSet[DicomTags.Modality].GetString(0, String.Empty); if (_context.UpdateCommands.Count > 0) { foreach (BaseImageLevelUpdateCommand command in _context.UpdateCommands) { command.File = file; processor.AddCommand(command); } } try { // Create a context for applying actions from the rules engine ServerActionContext context = new ServerActionContext(file, _context.StorageLocation.FilesystemKey, _context.Partition, _context.StorageLocation.Key); context.CommandProcessor = processor; _context.SopCompressionRulesEngine.Execute(context); String seriesUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty); String sopUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty); String finalDest = _context.StorageLocation.GetSopInstancePath(seriesUid, sopUid); if (_context.UpdateCommands.Count > 0) { processor.AddCommand(new SaveDicomFileCommand(_context.StorageLocation, file, file.Filename != finalDest)); } else if (file.Filename != finalDest || processor.CommandCount > 0) { // Have to be careful here about failure on exists vs. not failing on exists // because of the different use cases of the importer. // Save the file in the study folder, or if it's been compressed processor.AddCommand(new SaveDicomFileCommand(finalDest, file, file.Filename != finalDest)); } // Update the StudyStream object insertStudyXmlCommand = new InsertStudyXmlCommand(file, stream, _context.StorageLocation); processor.AddCommand(insertStudyXmlCommand); // Have the rules applied during the command processor, and add the objects. processor.AddCommand(new ApplySopRulesCommand(context, _context.SopProcessedRulesEngine)); // If specified, delete the file if (deleteFile != null) processor.AddCommand(new FileDeleteCommand(deleteFile, true)); // Insert into the database, but only if it's not a duplicate so the counts don't get thrown off insertInstanceCommand = new InsertInstanceCommand(file, _context.StorageLocation); processor.AddCommand(insertInstanceCommand); // Do a check if the StudyStatus value should be changed in the StorageLocation. This // should only occur if the object has been compressed in the previous steps. processor.AddCommand(new UpdateStudyStatusCommand(_context.StorageLocation, file)); if (uid != null) processor.AddCommand(new DeleteWorkQueueUidCommand(uid)); // Do the actual processing if (!processor.Execute()) { Platform.Log(LogLevel.Error, "Failure processing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid); Platform.Log(LogLevel.Error, "File that failed processing: {0}", file.Filename); throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException); } Platform.Log(ServerPlatform.InstanceLogLevel, "Processed SOP: {0} for Patient {1}", file.MediaStorageSopInstanceUid, patientsName); } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", processor.Description); processor.Rollback(); throw new ApplicationException("Unexpected exception when processing file.", e); } finally { if (insertInstanceCommand != null && insertInstanceCommand.Statistics.IsSet) _instanceStats.InsertDBTime.Add(insertInstanceCommand.Statistics); if (insertStudyXmlCommand != null && insertStudyXmlCommand.Statistics.IsSet) _instanceStats.InsertStreamTime.Add(insertStudyXmlCommand.Statistics); } } }