Example #1
		protected override void OnExecute(CommandProcessor theProcessor)
		{
			Platform.CheckForNullReference(Context, "Context");
			Platform.CheckForNullReference(Context.ReconcileWorkQueueData, "Context.ReconcileWorkQueueData");

			foreach (WorkQueueUid uid in Context.WorkQueueUidList)
			{
				string imagePath = GetReconcileUidPath(uid);

				try
				{
					using (var processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid)))
					{
						var deleteFile = new FileDeleteCommand(imagePath, true);
						var deleteUid = new DeleteWorkQueueUidCommand(uid);
						processor.AddCommand(deleteFile);
						processor.AddCommand(deleteUid);
						Platform.Log(ServerPlatform.InstanceLogLevel, deleteFile.ToString());
						if (!processor.Execute())
						{
							throw new Exception(String.Format("Unable to discard image {0}", uid.SopInstanceUid));
						}
					}
				}
				catch (Exception e)
				{
					Platform.Log(LogLevel.Error, e, "Unexpected exception discarding file: {0}", imagePath);
					SopInstanceProcessor.FailUid(uid, true);
				}
			}
		}
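Every example on this page leans on the same unit-of-work pattern: commands are queued with AddCommand, executed in order by Execute, and the already-executed ones are undone when a later command fails. A minimal, self-contained sketch of that pattern follows; ICommand and CommandProcessor here are hypothetical stand-ins, not the actual ClearCanvas types.

using System;
using System.Collections.Generic;

// Hypothetical sketch of the command-processor pattern, not ClearCanvas code.
public interface ICommand
{
	void Execute();   // perform the work
	void Undo();      // revert the work if a later command fails
}

public class CommandProcessor
{
	private readonly Queue<ICommand> _pending = new Queue<ICommand>();
	private readonly Stack<ICommand> _executed = new Stack<ICommand>();

	public string FailureReason { get; private set; }

	public void AddCommand(ICommand command)
	{
		_pending.Enqueue(command);
	}

	// Runs queued commands in order; on the first failure, rolls back and returns false.
	public bool Execute()
	{
		while (_pending.Count > 0)
		{
			ICommand command = _pending.Dequeue();
			try
			{
				command.Execute();
				_executed.Push(command);
			}
			catch (Exception e)
			{
				FailureReason = e.Message;
				Rollback();
				return false;
			}
		}
		return true;
	}

	// Undoes executed commands in reverse order.
	public void Rollback()
	{
		while (_executed.Count > 0)
			_executed.Pop().Undo();
	}
}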
Example #2
		/// <summary>
		/// Do the actual rebuild.  On error, will attempt to reprocess the study.
		/// </summary>
		public void RebuildXml()
		{
			string rootStudyPath = _location.GetStudyPath();

			try
			{
				using (ServerCommandProcessor processor = new ServerCommandProcessor("Rebuild XML"))
				{
				    var command = new RebuildStudyXmlCommand(_location.StudyInstanceUid, rootStudyPath);
					processor.AddCommand(command);

                    var updateCommand = new UpdateStudySizeInDBCommand(_location, command);
                    processor.AddCommand(updateCommand);

					if (!processor.Execute())
					{
						throw new ApplicationException(processor.FailureReason, processor.FailureException);
					}

                    Study theStudy = _location.Study;
                    if (theStudy.NumberOfStudyRelatedInstances != command.StudyXml.NumberOfStudyRelatedInstances)
                    {
                        // We rebuilt, but the counts don't match.
                        throw new StudyIntegrityValidationFailure(ValidationErrors.InconsistentObjectCount,
                                                                  new ValidationStudyInfo(theStudy,
                                                                                          _location.ServerPartition),
                                                                  string.Format(
                                                                      "Database study count {0} does not match study xml {1}",
                                                                      theStudy.NumberOfStudyRelatedInstances,
                                                                      command.StudyXml.NumberOfStudyRelatedInstances));
                    }

					Platform.Log(LogLevel.Info, "Completed reprocessing Study XML file for study {0}", _location.StudyInstanceUid);
				}
			}
			catch (Exception e)
			{
				Platform.Log(LogLevel.Error, e, "Unexpected error when rebuilding study XML for directory: {0}",
				             _location.FilesystemPath);
				StudyReprocessor reprocessor = new StudyReprocessor();
                try
                {
                    WorkQueue reprocessEntry = reprocessor.ReprocessStudy("Rebuild StudyXml", _location, Platform.Time);
                    if (reprocessEntry == null)
                    {
                        Platform.Log(LogLevel.Error, "Failure attempting to reprocess study: {0}",
                                     _location.StudyInstanceUid);
                    }
                    else
                        Platform.Log(LogLevel.Info, "Inserted reprocess request for study: {0}",
                                     _location.StudyInstanceUid);
                }
                catch(InvalidStudyStateOperationException ex)
                {
                    Platform.Log(LogLevel.Error, "Failure attempting to reprocess study {0}: {1}",
                                     _location.StudyInstanceUid, ex.Message);
                }
			}
		}
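Notice that UpdateStudySizeInDBCommand receives the RebuildStudyXmlCommand instance before either has run: the consumer reads the producer's output (the rebuilt StudyXml) only at execution time, after the processor has already executed the producer. A hedged sketch of that producer/consumer chaining, reusing the hypothetical ICommand/CommandProcessor types from the sketch after Example #1:

// Hypothetical commands illustrating output chaining; not ClearCanvas types.
public class BuildReportCommand : ICommand
{
	public string Report { get; private set; }   // populated during Execute
	public void Execute() { Report = "42 instances"; }
	public void Undo() { Report = null; }
}

public class SaveReportCommand : ICommand
{
	private readonly BuildReportCommand _source;
	public SaveReportCommand(BuildReportCommand source) { _source = source; }

	// Safe to read _source.Report here: the processor runs commands in the
	// order they were added, so the producer has already executed.
	public void Execute() { System.Console.WriteLine(_source.Report); }
	public void Undo() { }
}

public static class ChainingDemo
{
	public static void Run()
	{
		var build = new BuildReportCommand();
		var processor = new CommandProcessor();
		processor.AddCommand(build);                         // producer
		processor.AddCommand(new SaveReportCommand(build));  // consumer of its output
		processor.Execute();
	}
}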
		/// <summary>
		/// Creates an instance of <see cref="SopInstanceProcessorContext"/>
		/// </summary>
		/// <param name="commandProcessor">The <see cref="ServerCommandProcessor"/> used in the context</param>
		/// <param name="studyLocation">The <see cref="StudyStorageLocation"/> of the study being processed</param>
		/// <param name="uidGroup">A String value representing the group of SOP instances which are being processed.</param>
		/// <param name="request">An external request that may have triggered this item.</param>
		public SopInstanceProcessorContext(ServerCommandProcessor commandProcessor, StudyStorageLocation studyLocation,
									string uidGroup, ExternalRequestQueue request = null)
		{
			_commandProcessor = commandProcessor;
			_studyLocation = studyLocation;
			_group = uidGroup;
			_request = request;
		}
		private void HandleNonDuplicateFile(string seriesInstanceUid, string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomMessageBase message, string sourcePath, string path, bool dupImage, StudyProcessWorkQueueData data)
		{
			commandProcessor.AddCommand(new CreateDirectoryCommand(path));

			path = Path.Combine(path, studyLocation.PartitionFolder);
			commandProcessor.AddCommand(new CreateDirectoryCommand(path));

			path = Path.Combine(path, studyLocation.StudyFolder);
			commandProcessor.AddCommand(new CreateDirectoryCommand(path));

			path = Path.Combine(path, studyLocation.StudyInstanceUid);
			commandProcessor.AddCommand(new CreateDirectoryCommand(path));

			path = Path.Combine(path, seriesInstanceUid);
			commandProcessor.AddCommand(new CreateDirectoryCommand(path));

			path = Path.Combine(path, sopInstanceUid);
			path += ServerPlatform.DicomFileExtension;

			commandProcessor.AddCommand(new RenameFileCommand(sourcePath, path, true));

			WorkQueueUidData uidData = null;
			if (_context.Request != null && !string.IsNullOrEmpty(_context.Request.OperationToken))
			{
				uidData = new WorkQueueUidData
				{
					OperationToken = _context.Request.OperationToken
				};
			}

			commandProcessor.AddCommand(
				new UpdateWorkQueueCommand(message, studyLocation, dupImage, data, uidData, _context.Request));

			#region SPECIAL CODE FOR TESTING
			if (Diagnostics.Settings.SimulateFileCorruption)
			{
				commandProcessor.AddCommand(new CorruptDicomFileCommand(path));
			}
			#endregion
		}
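HandleNonDuplicateFile extends the destination path one directory level at a time and queues a CreateDirectoryCommand per level, so a failure at any depth can be rolled back cleanly. The layout it produces can be previewed with plain Path.Combine calls; the folder names below are made-up examples, and ".dcm" is assumed for ServerPlatform.DicomFileExtension.

using System;
using System.IO;

// Illustrative values only; real names come from the StudyStorageLocation.
string path = Path.Combine(@"D:\Filesystem", "Partition1");        // partition folder
path = Path.Combine(path, "20240101");                             // study folder
path = Path.Combine(path, "1.2.840.113619.2.1");                   // study instance UID
path = Path.Combine(path, "1.2.840.113619.2.1.1");                 // series instance UID
path = Path.Combine(path, "1.2.840.113619.2.1.1.1") + ".dcm";      // SOP instance UID + extension
Console.WriteLine(path);
// D:\Filesystem\Partition1\20240101\1.2.840.113619.2.1\1.2.840.113619.2.1.1\1.2.840.113619.2.1.1.1.dcm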
		private static bool SaveToFolder(string folder, string sopInstanceUid, string studyInstanceUid, DicomFile file)
		{
			using (var commandProcessor =
				new ServerCommandProcessor(String.Format("Saving Sop Instance to Incoming {0}", sopInstanceUid)))
			{
				string path = Path.Combine(folder, studyInstanceUid);
				commandProcessor.AddCommand(new CreateDirectoryCommand(path));

				path = Path.Combine(path, sopInstanceUid);
				path += ServerPlatform.DicomFileExtension;

				if (File.Exists(path))
					return false;

				commandProcessor.AddCommand(new SaveDicomFileCommand(path, file, true));

				return commandProcessor.Execute();
			}
		}
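SaveToFolder returns false when the target file already exists, letting callers treat a duplicate arrival as a no-op. Note the File.Exists check runs before the processor executes, so two concurrent callers could still race on the same path. A hypothetical caller (placeholder paths and UIDs; the Warn log level is an assumption):

// Hypothetical usage sketch; not taken from the original source.
var file = new DicomFile(@"D:\Staging\1.2.3.4.5.dcm");
file.Load(DicomReadOptions.Default);
if (!SaveToFolder(@"D:\Incoming", "1.2.3.4.5", "1.2.3.4", file))
	Platform.Log(LogLevel.Warn, "SOP 1.2.3.4.5 already present in Incoming; skipping.");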
		private void InsertInstance(DicomFile file, StudyXml stream, WorkQueueUid uid, string deleteFile, SopInstanceProcessorSopType sopType)
		{
			using (var processor = new ServerCommandProcessor("Processing WorkQueue DICOM file"))
			{
			    EventsHelper.Fire(OnInsertingSop, this, new SopInsertingEventArgs {Processor = processor });

				InsertInstanceCommand insertInstanceCommand = null;
				InsertStudyXmlCommand insertStudyXmlCommand = null;

				String patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, String.Empty);
				_modality = file.DataSet[DicomTags.Modality].GetString(0, String.Empty);

				if (_context.UpdateCommands.Count > 0)
				{
					foreach (BaseImageLevelUpdateCommand command in _context.UpdateCommands)
					{
						command.File = file;
						processor.AddCommand(command);
					}
				}
				try
				{
					// Create a context for applying actions from the rules engine
					ServerActionContext context =
						new ServerActionContext(file, _context.StorageLocation.FilesystemKey, _context.Partition, _context.StorageLocation.Key);
					context.CommandProcessor = processor;

					_context.SopCompressionRulesEngine.Execute(context);
                    String seriesUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
                    String sopUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);
                    String finalDest = _context.StorageLocation.GetSopInstancePath(seriesUid, sopUid);

					if (_context.UpdateCommands.Count > 0)
					{
						processor.AddCommand(new SaveDicomFileCommand(_context.StorageLocation, file, file.Filename != finalDest));
					}
					else if (file.Filename != finalDest || processor.CommandCount > 0)
                    {
						// Have to be careful here about failing vs. not failing when the file
						// already exists, because of the different use cases of the importer.
						// Save the file in the study folder, or to its final location if it has been compressed.
						processor.AddCommand(new SaveDicomFileCommand(finalDest, file, file.Filename != finalDest));
                    }

					// Update the StudyStream object
					insertStudyXmlCommand = new InsertStudyXmlCommand(file, stream, _context.StorageLocation);
					processor.AddCommand(insertStudyXmlCommand);

					// Apply the rules as the command processor executes, adding any resulting commands.
					processor.AddCommand(new ApplySopRulesCommand(context,_context.SopProcessedRulesEngine));

					// If specified, delete the file
					if (deleteFile != null)
						processor.AddCommand(new FileDeleteCommand(deleteFile, true));

					// Insert into the database, but only if it's not a duplicate, so the counts don't get out of sync
					insertInstanceCommand = new InsertInstanceCommand(file, _context.StorageLocation);
					processor.AddCommand(insertInstanceCommand);
					
					// Check whether the StudyStatus value should be changed in the StorageLocation. This
					// should only occur if the object was compressed in the previous steps.
					processor.AddCommand(new UpdateStudyStatusCommand(_context.StorageLocation, file));

					if (uid!=null)
						processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

					// Do the actual processing
					if (!processor.Execute())
					{
						Platform.Log(LogLevel.Error, "Failure processing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid);
						Platform.Log(LogLevel.Error, "File that failed processing: {0}", file.Filename);
						throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException);
					}
					Platform.Log(ServerPlatform.InstanceLogLevel, "Processed SOP: {0} for Patient {1}", file.MediaStorageSopInstanceUid, patientsName);

					// Fire the NewSopEventArgs or UpdateSopEventArgs event.
					// We know it's a duplicate if we had to delete the duplicate object.
					if (sopType == SopInstanceProcessorSopType.NewSop)
						EventManager.FireEvent(this, new NewSopEventArgs { File = file, ServerPartitionEntry = _context.Partition, WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize });
					else if (sopType == SopInstanceProcessorSopType.UpdatedSop)
						EventManager.FireEvent(this, new UpdateSopEventArgs {File = file,ServerPartitionEntry = _context.Partition,WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize});
				}
				catch (Exception e)
				{
					Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}.  Rolling back operation.",
					             processor.Description);
					processor.Rollback();
					if (sopType == SopInstanceProcessorSopType.NewSop)
						EventManager.FireEvent(this, new FailedNewSopEventArgs { File = file, ServerPartitionEntry = _context.Partition, WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize, FailureMessage = e.Message });
					else
						EventManager.FireEvent(this, new FailedUpdateSopEventArgs { File = file, ServerPartitionEntry = _context.Partition, WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize, FailureMessage = e.Message });
					throw new ApplicationException("Unexpected exception when processing file.", e);
				}
				finally
				{
					if (insertInstanceCommand != null && insertInstanceCommand.Statistics.IsSet)
						_instanceStats.InsertDBTime.Add(insertInstanceCommand.Statistics);
					if (insertStudyXmlCommand != null && insertStudyXmlCommand.Statistics.IsSet)
						_instanceStats.InsertStreamTime.Add(insertStudyXmlCommand.Statistics);
				}
			}
		}
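InsertInstance reports its outcome through events rather than a return value: NewSop/UpdateSop events on success, FailedNewSop/FailedUpdateSop after rollback, with the exception rethrown after the failure event fires. A stripped-down sketch of that notify-then-rethrow shape using plain .NET events; the event args type below is hypothetical, not the ClearCanvas EventManager API.

using System;

public class SopProcessedEventArgs : EventArgs
{
	public string SopInstanceUid { get; set; }
	public string FailureMessage { get; set; }   // null when processing succeeded
}

public class SopProcessorSketch
{
	public event EventHandler<SopProcessedEventArgs> SopProcessed;

	public void Process(string sopInstanceUid)
	{
		try
		{
			// ... run the command processor ...
			SopProcessed?.Invoke(this, new SopProcessedEventArgs { SopInstanceUid = sopInstanceUid });
		}
		catch (Exception e)
		{
			// Failure event carries the message, mirroring FailedNewSopEventArgs above.
			SopProcessed?.Invoke(this, new SopProcessedEventArgs
			{
				SopInstanceUid = sopInstanceUid,
				FailureMessage = e.Message
			});
			throw;   // rethrow after notifying, as InsertInstance does
		}
	}
}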
Example #7
		public void RestoreNearlineStudy(RestoreQueue queueItem, string zipFile, string studyFolder)
		{
            ServerFilesystemInfo fs = _hsmArchive.Selector.SelectFilesystem();
			if (fs == null)
			{
				DateTime scheduleTime = Platform.Time.AddMinutes(5);
				Platform.Log(LogLevel.Error, "No writeable filesystem for restore, rescheduling restore request to {0}", scheduleTime);
				queueItem.FailureDescription = "No writeable filesystem for restore, rescheduling restore request";
				_hsmArchive.UpdateRestoreQueue(queueItem, RestoreQueueStatusEnum.Pending, scheduleTime);
				return;
			}

            string destinationFolder = Path.Combine(fs.Filesystem.FilesystemPath, _hsmArchive.ServerPartition.PartitionFolder);

		    StudyStorageLocation restoredLocation = null;
			try
			{
				using (var processor = new ServerCommandProcessor("HSM Restore Offline Study"))
				{
					processor.AddCommand(new CreateDirectoryCommand(destinationFolder));
					destinationFolder = Path.Combine(destinationFolder, studyFolder);
					processor.AddCommand(new CreateDirectoryCommand(destinationFolder));
					destinationFolder = Path.Combine(destinationFolder, _studyStorage.StudyInstanceUid);
					processor.AddCommand(new CreateDirectoryCommand(destinationFolder));
					processor.AddCommand(new ExtractZipCommand(zipFile, destinationFolder));

					// We rebuild the StudyXml, in case any settings or issues have happened since archival
					processor.AddCommand(new RebuildStudyXmlCommand(_studyStorage.StudyInstanceUid, destinationFolder));

                    // Apply the rules engine.
					var context =
						new ServerActionContext(null, fs.Filesystem.GetKey(), _hsmArchive.ServerPartition,
						                        queueItem.StudyStorageKey, processor);
					processor.AddCommand(
						new ApplyRulesCommand(destinationFolder, _studyStorage.StudyInstanceUid, context));

					// Do the actual insert into the DB
					var insertStorageCommand = new InsertFilesystemStudyStorageCommand(
													_hsmArchive.PartitionArchive.ServerPartitionKey,
						                            _studyStorage.StudyInstanceUid,
						                            studyFolder,
						                            fs.Filesystem.GetKey(), _syntax);
					processor.AddCommand(insertStorageCommand);

					if (!processor.Execute())
					{
						Platform.Log(LogLevel.Error, "Unexpected error processing restore request for {0} on archive {1}",
						             _studyStorage.StudyInstanceUid, _hsmArchive.PartitionArchive.Description);
						queueItem.FailureDescription = processor.FailureReason;
						_hsmArchive.UpdateRestoreQueue(queueItem, RestoreQueueStatusEnum.Failed, Platform.Time);
					}
					else
					{
					    restoredLocation = insertStorageCommand.Location;

						// Unlock the Queue Entry
						using (
							IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
						{
							bool retVal = _hsmArchive.UpdateRestoreQueue(update, queueItem, RestoreQueueStatusEnum.Completed, Platform.Time.AddSeconds(60));
							var studyLock = update.GetBroker<ILockStudy>();
						    var parms = new LockStudyParameters
						                    {
						                        StudyStorageKey = queueItem.StudyStorageKey,
						                        QueueStudyStateEnum = QueueStudyStateEnum.Idle
						                    };
							retVal = retVal && studyLock.Execute(parms);
							if (!parms.Successful || !retVal)
							{
								string message =
									String.Format("Study {0} on partition {1} failed to unlock.", _studyStorage.StudyInstanceUid,
									              _hsmArchive.ServerPartition.Description);
								Platform.Log(LogLevel.Info, message);
								throw new ApplicationException(message);
							}
							update.Commit();

							Platform.Log(LogLevel.Info, "Successfully restored study: {0} on archive {1}", _studyStorage.StudyInstanceUid,
										 _hsmArchive.PartitionArchive.Description);

                            OnStudyRestored(restoredLocation);
						}
					}
				}
			}
            catch(StudyIntegrityValidationFailure ex)
            {
                Debug.Assert(restoredLocation != null);
                // study has been restored but it seems corrupted. Need to reprocess it.
                ReprocessStudy(restoredLocation, ex.Message);
            }
			catch (Exception e)
			{
				Platform.Log(LogLevel.Error, e, "Unexpected exception processing restore request for {0} on archive {1}",
							 _studyStorage.StudyInstanceUid, _hsmArchive.PartitionArchive.Description);
				_hsmArchive.UpdateRestoreQueue(queueItem, RestoreQueueStatusEnum.Failed, Platform.Time);
			}
		}
Example #8
        private void RestoreOnlineStudy(RestoreQueue queueItem, string zipFile, string destinationFolder)
		{
			try
			{
				using (var processor = new ServerCommandProcessor("HSM Restore Online Study"))
				{
				    var zipService = Platform.GetService<IZipService>();
					using (var zipReader = zipService.OpenRead(zipFile))
					{
						foreach (string file in zipReader.EntryFileNames)
						{
							processor.AddCommand(new ExtractZipFileAndReplaceCommand(zipFile, file, destinationFolder));
						}
					}

					// We rebuild the StudyXml, in case any settings or issues have happened since archival
					processor.AddCommand(new RebuildStudyXmlCommand(_location.StudyInstanceUid, destinationFolder));

					StudyStatusEnum status;

					if (_syntax.Encapsulated && _syntax.LosslessCompressed)
						status = StudyStatusEnum.OnlineLossless;
					else if (_syntax.Encapsulated && _syntax.LossyCompressed)
						status = StudyStatusEnum.OnlineLossy;
					else
						status = StudyStatusEnum.Online;

					processor.AddCommand(new UpdateStudyStateCommand(_location, status, _serverSyntax));

					// Apply the rules engine.
					var context =
						new ServerActionContext(null, _location.FilesystemKey, _hsmArchive.ServerPartition,
												queueItem.StudyStorageKey, processor);
					processor.AddCommand(
						new ApplyRulesCommand(destinationFolder, _location.StudyInstanceUid, context));

					if (!processor.Execute())
					{
						Platform.Log(LogLevel.Error, "Unexpected error processing restore request for {0} on archive {1}",
									 _location.StudyInstanceUid, _hsmArchive.PartitionArchive.Description);
						queueItem.FailureDescription = processor.FailureReason;
						_hsmArchive.UpdateRestoreQueue(queueItem, RestoreQueueStatusEnum.Failed, Platform.Time);
					}
					else
					{
						// Unlock the Queue Entry and set to complete
						using (IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
						{
							_hsmArchive.UpdateRestoreQueue(update, queueItem, RestoreQueueStatusEnum.Completed, Platform.Time.AddSeconds(60));
							var studyLock = update.GetBroker<ILockStudy>();
							var parms = new LockStudyParameters
							                            	{
							                            		StudyStorageKey = queueItem.StudyStorageKey,
							                            		QueueStudyStateEnum = QueueStudyStateEnum.Idle
							                            	};
							bool retVal = studyLock.Execute(parms);
							if (!parms.Successful || !retVal)
							{
								Platform.Log(LogLevel.Info, "Study {0} on partition {1} failed to unlock.", _location.StudyInstanceUid,
											 _hsmArchive.ServerPartition.Description);
							}

							update.Commit();

							Platform.Log(LogLevel.Info, "Successfully restored study: {0} on archive {1}", _location.StudyInstanceUid,
										 _hsmArchive.PartitionArchive.Description);

						    _location = ReloadStorageLocation();
                            OnStudyRestored(_location);
						}
					}
				}
			}
            catch (StudyIntegrityValidationFailure ex)
            {
                // study has been restored but it seems corrupted. Need to reprocess it.
                ReprocessStudy(_location, ex.Message);
            }
            catch (Exception e)
			{
				Platform.Log(LogLevel.Error, e, "Unexpected exception processing restore request for {0} on archive {1}",
							 _location.StudyInstanceUid, _hsmArchive.PartitionArchive.Description);
				queueItem.FailureDescription = e.Message;
				_hsmArchive.UpdateRestoreQueue(queueItem, RestoreQueueStatusEnum.Failed, Platform.Time);
			}
		}
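The status chosen above is a three-way decision over the restored transfer syntax: encapsulated and lossless maps to OnlineLossless, encapsulated and lossy to OnlineLossy, everything else to Online. Factored into a helper it reads as follows (hypothetical method, same logic; the Encapsulated/LosslessCompressed/LossyCompressed flags are the ones used in the example):

// Hypothetical helper; same mapping as the inline if/else above.
private static StudyStatusEnum GetRestoredStatus(TransferSyntax syntax)
{
	if (syntax.Encapsulated && syntax.LosslessCompressed)
		return StudyStatusEnum.OnlineLossless;
	if (syntax.Encapsulated && syntax.LossyCompressed)
		return StudyStatusEnum.OnlineLossy;
	return StudyStatusEnum.Online;
}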
        private static void RemoveWorkQueueUid(WorkQueueUid uid, string fileToDelete)
        {
            using (var processor = new ServerCommandProcessor("Remove Work Queue Uid"))
            {
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
                if (String.IsNullOrEmpty(fileToDelete) == false)
                {
                    processor.AddCommand(new FileDeleteCommand(fileToDelete, true));

                }

                if (!processor.Execute())
                {
                    String error = String.Format("Unable to delete Work Queue Uid {0}: {1}", uid.Key, processor.FailureReason);
                    Platform.Log(LogLevel.Error, error);
                    throw new ApplicationException(error, processor.FailureException);
                }
            }

        }
        /// <summary>
        /// Removes all WorkQueueUids from the database and delete the corresponding DICOM files from the filesystem.
        /// </summary>
        private void ProcessWorkQueueUids()
        {
            if (Study == null)
                Platform.Log(LogLevel.Info, "Begin StudyProcess Cleanup (Study has not been created): Attempt #{0}. {1} unprocessed files will be removed",
                                        WorkQueueItem.FailureCount + 1,
                                        WorkQueueUidList.Count);
            else
                Platform.Log(LogLevel.Info,
                             "Begin StudyProcess Cleanup for study {0},  Patient {1} (PatientId:{2} A#:{3}) on Partition {4}. Attempt #{5}. {6} unprocessed files will be removed",
                             Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                             Study.AccessionNumber, ServerPartition.Description,
                             WorkQueueItem.FailureCount + 1,
                             WorkQueueUidList.Count
                             );

            foreach (WorkQueueUid sop in WorkQueueUidList)
            {
                string path = GetFileStoredPath(sop);

                Platform.Log(LogLevel.Info, "Cleaning up {0}", path);

                using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", sop.SopInstanceUid)))
                {
                    // delete the file
                    FileDeleteCommand deleteFile = new FileDeleteCommand(path, true);
                    processor.AddCommand(deleteFile);

                    // delete the WorkQueueUID from the database
                    DeleteWorkQueueUidCommand deleteUid = new DeleteWorkQueueUidCommand(sop);
                    processor.AddCommand(deleteUid);

                    try
                    {
                        // delete the directory (if empty)
                        var fileInfo = new FileInfo(path);
                        var deleteDir = new ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand(fileInfo.Directory.FullName, false, true);
                        processor.AddCommand(deleteDir);
                    }
                    catch (DirectoryNotFoundException)
                    {
                        // ignore
                    }

                    if (!processor.Execute())
                    {
                        throw new Exception(String.Format("Unable to delete SOP {0}", sop.SopInstanceUid), processor.FailureException);
                    }
                }

                // Delete the base directory if it's empty
                var baseDir = GetBaseDirectory(sop);
                if (Directory.Exists(baseDir))
                {
                    using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", sop.SopInstanceUid)))
                    {
                        var deleteDir = new ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand(baseDir, false, true);
                        processor.AddCommand(deleteDir);

                        if (!processor.Execute())
                        {
                            throw new Exception(String.Format("Unable to delete {0}", baseDir), processor.FailureException);
                        }
                    }
                }

            }
			
        }
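ProcessWorkQueueUids deletes each SOP file and then opportunistically removes the now-empty enclosing directories; the DeleteDirectoryCommand calls above are constructed so a missing directory is ignored. Outside a command processor, the same "delete only if empty" guard looks like this plain System.IO sketch:

using System.IO;
using System.Linq;

// Plain BCL sketch of the guard the directory-delete commands above rely on.
static void DeleteIfEmpty(string directory)
{
	if (!Directory.Exists(directory))
		return;   // missing directory: nothing to do
	if (Directory.EnumerateFileSystemEntries(directory).Any())
		return;   // still has content: leave it alone
	Directory.Delete(directory);
}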
		private void DeleteDuplicate(WorkQueueUid uid)
		{
			using (ServerCommandProcessor processor = new ServerCommandProcessor("Delete Received Duplicate"))
			{
				FileInfo duplicateFile = GetDuplicateSopFile(uid);
				processor.AddCommand(new FileDeleteCommand(duplicateFile.FullName, true));
				processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
				if (!processor.Execute())
				{
					throw new ApplicationException(processor.FailureReason, processor.FailureException);
				}
				Platform.Log(ServerPlatform.InstanceLogLevel, "Discarded duplicate SOP {0} in {1}", uid.SopInstanceUid, duplicateFile.FullName);
			}
		}
		/// <summary>
		/// Reprocess a specific study.
		/// </summary>
		/// <param name="partition">The ServerPartition the study is on.</param>
		/// <param name="location">The storage location of the study to process.</param>
		/// <param name="engine">The rules engine to use when processing the study.</param>
		/// <param name="postArchivalEngine">The rules engine used for studies that have been archived.</param>
		/// <param name="dataAccessEngine">The rules engine strictly used for setting data access.</param>
		protected static void ProcessStudy(ServerPartition partition, StudyStorageLocation location, ServerRulesEngine engine, ServerRulesEngine postArchivalEngine, ServerRulesEngine dataAccessEngine)
		{
			if (!location.QueueStudyStateEnum.Equals(QueueStudyStateEnum.Idle) || !location.AcquireWriteLock())
			{
				Platform.Log(LogLevel.Error, "Unable to lock study {0}. The study is being processed. (Queue State: {1})", location.StudyInstanceUid,location.QueueStudyStateEnum.Description); 
			}
			else
			{
				try
				{
					DicomFile msg = LoadInstance(location);
					if (msg == null)
					{
						Platform.Log(LogLevel.Error, "Unable to load file for study {0}", location.StudyInstanceUid);
						return;
					}

					bool archiveQueueExists;
					bool archiveStudyStorageExists;
					bool filesystemDeleteExists;
					using (IReadContext read = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
					{
						// Check for existing archive queue entries
						var archiveQueueBroker = read.GetBroker<IArchiveQueueEntityBroker>();
						var archiveQueueCriteria = new ArchiveQueueSelectCriteria();
						archiveQueueCriteria.StudyStorageKey.EqualTo(location.Key);
						archiveQueueExists = archiveQueueBroker.Count(archiveQueueCriteria) > 0;


						var archiveStorageBroker = read.GetBroker<IArchiveStudyStorageEntityBroker>();
						var archiveStudyStorageCriteria = new ArchiveStudyStorageSelectCriteria();
						archiveStudyStorageCriteria.StudyStorageKey.EqualTo(location.Key);
						archiveStudyStorageExists = archiveStorageBroker.Count(archiveStudyStorageCriteria) > 0;

						var filesystemQueueBroker = read.GetBroker<IFilesystemQueueEntityBroker>();
						var filesystemQueueCriteria = new FilesystemQueueSelectCriteria();
						filesystemQueueCriteria.StudyStorageKey.EqualTo(location.Key);
						filesystemQueueCriteria.FilesystemQueueTypeEnum.EqualTo(FilesystemQueueTypeEnum.DeleteStudy);
						filesystemDeleteExists = filesystemQueueBroker.Count(filesystemQueueCriteria) > 0;
					}

					using (var commandProcessor = new ServerCommandProcessor("Study Rule Processor"))
					{
						var context = new ServerActionContext(msg, location.FilesystemKey, partition, location.Key, commandProcessor);
					
						// Check if the Study has been archived 
						if (archiveStudyStorageExists && !archiveQueueExists && !filesystemDeleteExists)
						{
							// Add a command to delete the current filesystemQueue entries, so that they can 
							// be reinserted by the rules engine.
							context.CommandProcessor.AddCommand(new DeleteFilesystemQueueCommand(location.Key, ServerRuleApplyTimeEnum.StudyArchived));

							// How to deal with existing FilesystemQueue entries is problematic here.  If the study
							// has been migrated off tier 1, we probably don't want to modify the tier migration 
							// entries.  Compression entries may have been entered when the Study was initially 
							// processed, we don't want to delete them, because they might still be valid.  
							// We just re-run the rules engine at this point, and delete only the StudyPurge entries,
							// since those we know at least would only be applied for archived studies.
							var studyRulesEngine = new StudyRulesEngine(postArchivalEngine, location, location.ServerPartition, location.LoadStudyXml());
							studyRulesEngine.Apply(ServerRuleApplyTimeEnum.StudyArchived, commandProcessor);

							// Post Archive doesn't allow data access rules.  Force Data Access rules to be reapplied
							// to these studies also.
							dataAccessEngine.Execute(context);
						}
						else
						{
							// Add a command to delete the current filesystemQueue entries, so that they can 
							// be reinserted by the rules engine.
							context.CommandProcessor.AddCommand(new DeleteFilesystemQueueCommand(location.Key,ServerRuleApplyTimeEnum.StudyProcessed));

							// Execute the rules engine, insert commands to update the database into the command processor.
							// Due to ticket #11673, we create a new rules engine instance for each study, since the Study QC rules
							// don't work right now with a single rules engine.
							//TODO CR (Jan 2014) - Check if we can go back to caching the rules engine to reduce database hits on the rules
							var studyRulesEngine = new StudyRulesEngine(location, location.ServerPartition, location.LoadStudyXml());
							studyRulesEngine.Apply(ServerRuleApplyTimeEnum.StudyProcessed, commandProcessor);
						}

						// Do the actual database updates.
						if (false == context.CommandProcessor.Execute())
						{
							Platform.Log(LogLevel.Error, "Unexpected failure processing Study level rules for study {0}", location.StudyInstanceUid);
						}

						// Log the FilesystemQueue related entries
						location.LogFilesystemQueue();
					}
				}
				finally
				{
					location.ReleaseWriteLock();
				}
			}
		}
Example #13
		/// <summary>
		/// Archive the specified <see cref="ArchiveQueue"/> item.
		/// </summary>
		/// <param name="queueItem">The ArchiveQueue item to archive.</param>
		public void Run(ArchiveQueue queueItem)
		{
            using (ArchiveProcessorContext executionContext = new ArchiveProcessorContext(queueItem))
            {
                try
                {
                    if (!GetStudyStorageLocation(queueItem))
                    {
                        Platform.Log(LogLevel.Error,
                                     "Unable to find readable study storage location for archival queue request {0}.  Delaying request.",
                                     queueItem.Key);
                        queueItem.FailureDescription = "Unable to find readable study storage location for archival queue request.";
                        _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Pending, Platform.Time.AddMinutes(2));
                        return;
                    }

                    // First, check to see if we can lock the study, if not just reschedule the queue entry.
                    if (!_storageLocation.QueueStudyStateEnum.Equals(QueueStudyStateEnum.Idle))
                    {
                        Platform.Log(LogLevel.Info, "Study {0} on partition {1} is currently locked, delaying archival.", _storageLocation.StudyInstanceUid, _hsmArchive.ServerPartition.Description);
                        queueItem.FailureDescription = "Study is currently locked, delaying archival.";
                        _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Pending, Platform.Time.AddMinutes(2));
                        return;
                    }

                    StudyIntegrityValidator validator = new StudyIntegrityValidator();
                    validator.ValidateStudyState("Archive", _storageLocation, StudyIntegrityValidationModes.Default);

                    using (IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                    {
                        ILockStudy studyLock = update.GetBroker<ILockStudy>();
                        LockStudyParameters parms = new LockStudyParameters
                                                    	{
                                                    		StudyStorageKey = queueItem.StudyStorageKey,
                                                    		QueueStudyStateEnum = QueueStudyStateEnum.ArchiveScheduled
                                                    	};
                    	bool retVal = studyLock.Execute(parms);
                        if (!parms.Successful || !retVal)
                        {
                            Platform.Log(LogLevel.Info, "Study {0} on partition {1} failed to lock, delaying archival.", _storageLocation.StudyInstanceUid, _hsmArchive.ServerPartition.Description);
                            queueItem.FailureDescription = "Study failed to lock, delaying archival.";
                            _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Pending, Platform.Time.AddMinutes(2));
                            return;
                        }
                        update.Commit();
                    }
					
                    string studyXmlFile = _storageLocation.GetStudyXmlPath(); 
                    
                    // Load the study Xml file, this is used to generate the list of dicom files to archive.
                    LoadStudyXml(studyXmlFile);

                    DicomFile file = LoadFileFromStudyXml();

                	string patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, string.Empty);
					string patientId = file.DataSet[DicomTags.PatientId].GetString(0, string.Empty);
					string accessionNumber = file.DataSet[DicomTags.AccessionNumber].GetString(0, string.Empty);

                	Platform.Log(LogLevel.Info,
                	             "Starting archival of study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} on archive {5}",
                	             _storageLocation.StudyInstanceUid, patientsName, patientId,
                	             accessionNumber, _hsmArchive.ServerPartition.Description,
                	             _hsmArchive.PartitionArchive.Description);

                    // Use the command processor to do the archival.
                    using (ServerCommandProcessor commandProcessor = new ServerCommandProcessor("Archive"))
                    {

						var archiveStudyCmd = new ArchiveStudyCommand(_storageLocation, _hsmArchive.HsmPath, executionContext.TempDirectory, _hsmArchive.PartitionArchive) 
								{ ForceCompress = HsmSettings.Default.CompressZipFiles };

						commandProcessor.AddCommand(archiveStudyCmd);
	                    commandProcessor.AddCommand(new UpdateArchiveQueueItemCommand(queueItem.GetKey(),_storageLocation.GetKey(), ArchiveQueueStatusEnum.Completed));
                        
                    	StudyRulesEngine studyEngine = new StudyRulesEngine(_storageLocation, _hsmArchive.ServerPartition, _studyXml);
                    	studyEngine.Apply(ServerRuleApplyTimeEnum.StudyArchived, commandProcessor);
						

                        if (!commandProcessor.Execute())
                        {
                            Platform.Log(LogLevel.Error,
                                         "Unexpected failure archiving study ({0}) to archive {1}: {2}, zip filename: {3}",
                                         _storageLocation.StudyInstanceUid, _hsmArchive.PartitionArchive.Description,
										 commandProcessor.FailureReason, archiveStudyCmd.OutputZipFilePath);

                            queueItem.FailureDescription = commandProcessor.FailureReason;
                            _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Failed, Platform.Time);
                        }
                        else
                            Platform.Log(LogLevel.Info, "Successfully archived study {0} on {1} to zip {2}",
                                         _storageLocation.StudyInstanceUid,
										 _hsmArchive.PartitionArchive.Description, archiveStudyCmd.OutputZipFilePath);

						// Log the current FilesystemQueue settings
						_storageLocation.LogFilesystemQueue();
                    }
                }
                catch (StudyIntegrityValidationFailure ex)
                {
                    StringBuilder error = new StringBuilder();
                    error.AppendLine(String.Format("Partition  : {0}", ex.ValidationStudyInfo.ServerAE));
                    error.AppendLine(String.Format("Patient    : {0}", ex.ValidationStudyInfo.PatientsName));
                    error.AppendLine(String.Format("Study Uid  : {0}", ex.ValidationStudyInfo.StudyInstaneUid));
                    error.AppendLine(String.Format("Accession# : {0}", ex.ValidationStudyInfo.AccessionNumber));
                    error.AppendLine(String.Format("Study Date : {0}", ex.ValidationStudyInfo.StudyDate));

                    queueItem.FailureDescription = error.ToString();
                    _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Failed, Platform.Time);
                }
                catch (Exception e)
                {
                    String msg = String.Format("Unexpected exception archiving study: {0} on {1}: {2}",
                                 _storageLocation.StudyInstanceUid, _hsmArchive.PartitionArchive.Description, e.Message);

                    Platform.Log(LogLevel.Error, e, msg);
                    queueItem.FailureDescription = msg;
                    _hsmArchive.UpdateArchiveQueue(queueItem, ArchiveQueueStatusEnum.Failed, Platform.Time);
                }
                finally
                {
                    // Unlock the Queue Entry
                    using (IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                    {
                        ILockStudy studyLock = update.GetBroker<ILockStudy>();
                        LockStudyParameters parms = new LockStudyParameters
                                                    	{
                                                    		StudyStorageKey = queueItem.StudyStorageKey,
                                                    		QueueStudyStateEnum = QueueStudyStateEnum.Idle
                                                    	};
                    	bool retVal = studyLock.Execute(parms);
                        if (!parms.Successful || !retVal)
                        {
                            Platform.Log(LogLevel.Info, "Study {0} on partition {1} failed to unlock.", _storageLocation.StudyInstanceUid, _hsmArchive.ServerPartition.Description);
                        }
                        update.Commit();
                    }
                }
            }			
		}
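Run brackets the whole archival in a lock/unlock discipline: the study is moved to ArchiveScheduled up front (and the queue entry rescheduled if the lock cannot be taken), while the finally block always returns the state to Idle, so a failed archival cannot leave the study locked. A compact sketch of that shape, with the ILockStudy broker calls reduced to hypothetical helpers:

using System;

public class ArchiveRunSketch
{
	// Hypothetical stand-ins for the ILockStudy broker calls in Run().
	private bool TryLockStudy() { return true; }    // state -> ArchiveScheduled
	private void UnlockStudy() { }                  // state -> Idle
	private void RescheduleLater() { }              // requeue as Pending

	public void Run()
	{
		if (!TryLockStudy())
		{
			RescheduleLater();   // try again later, as the real code does
			return;
		}
		try
		{
			// ... load the study xml, build the zip, run the command processor ...
		}
		finally
		{
			UnlockStudy();       // always runs, even when archival failed
		}
	}
}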
		/// <summary>
		/// Create Duplicate SIQ Entry
		/// </summary>
		/// <param name="file"></param>
		/// <param name="location"></param>
		/// <param name="sourcePath"></param>
		/// <param name="queue"></param>
		/// <param name="uid"></param>
		/// <param name="data"></param>
		public static void CreateDuplicateSIQEntry(DicomFile file, StudyStorageLocation location, string sourcePath,
		                                           WorkQueue queue, WorkQueueUid uid, StudyProcessWorkQueueData data)
		{
			Platform.Log(LogLevel.Info, "Creating Work Queue Entry for duplicate...");
			String uidGroup = queue.GroupID ?? queue.GetKey().Key.ToString();
			using (var commandProcessor = new ServerCommandProcessor("Insert Work Queue entry for duplicate"))
			{
				commandProcessor.AddCommand(new FileDeleteCommand(sourcePath, true));

				var sopProcessingContext = new SopInstanceProcessorContext(commandProcessor, location, uidGroup);
				DicomProcessingResult result = Process(sopProcessingContext, file, data);
				if (!result.Successful)
				{
					FailUid(uid, true);
					return;
				}

				commandProcessor.AddCommand(new DeleteWorkQueueUidCommand(uid));

				if (!commandProcessor.Execute())
				{
					Platform.Log(LogLevel.Error, "Unexpected error when creating duplicate study integrity queue entry: {0}",
					             commandProcessor.FailureReason);
					FailUid(uid, true);
				}
			}
		}
Example #15
		private void Purge(StudyItem study)
		{
			Task.Factory.StartNew(() =>
			{
				var archive = SelectPartitionArchive(study);

				if (archive == null)
				{
					MessageBox.Show("Please add an archive in the partition where this study is located");
					return;
				}

				if (study.IsArchivingScheduled())
				{
					if (
						MessageBox.Show(
							"This study is scheduled in the Archive Queue. Do you want to remove it from the queue before purging?",
							"Study is scheduled for archiving",
							MessageBoxButtons.YesNo) == System.Windows.Forms.DialogResult.Yes)
					{
						using (var processor = new ServerCommandProcessor("archive"))
						{
							processor.AddCommand(new DeleteAllArchiveQueueItemCommand(study.StudyStorage, archive));
							processor.Execute();
						}

					}
				}


				if (study.StudyStorageLocation.ArchiveLocations == null || !study.StudyStorageLocation.ArchiveLocations.Any())
				{
					using (var processor = new ServerCommandProcessor("archive"))
					{
						var archiveCommand = new ArchiveStudyCommand(study.StudyStorageLocation, GetArchivePath(archive), @"C:\temp", archive);
						archiveCommand.ProgressUpdated += (s, e) =>
						{

							study.OperationProgress = new OperationProgress()
							{
								Status = e.Percentage == 100 ? "Archived" : e.Status,
								Percentage = (int)e.Percentage
							};
						};
						
						processor.AddCommand(archiveCommand);

						if (!processor.Execute())
							MessageBox.Show(string.Format("Unable to archive study: {0}", processor.FailureException.Message));
					}
				}


				using (var processor = new ServerCommandProcessor("archive"))
				{
					processor.AddCommand(new PurgeStudyCommand(study.StudyStorage));
					if (!processor.Execute())
						MessageBox.Show(string.Format("Unable to purge study: {0}", processor.FailureException.Message));
					else
					{
						MessageBox.Show("Study has been successfully purged");
						study.Status = "Nearline";
					}
				}
				
			});

			
		}
        private void ProcessInstanceLevelDelete(Model.WorkQueue item)
        {
            // ensure the Study is loaded.
            Study study = StorageLocation.Study;
            Platform.CheckForNullReference(study, "Study record doesn't exist");

            Platform.Log(LogLevel.Info, "Processing Instance Level Deletion for Study {0}, A#: {1}",
                                         study.StudyInstanceUid, study.AccessionNumber);

            bool completed = false;
            try
            {
                // Load the list of Sop Instances to be deleted from the WorkQueueUid
                LoadUids(item);

                // Go through the list of series and add commands
                // to delete each of them. It's all or nothing.                
                using (var processor = new ServerCommandProcessor(String.Format("Deleting Series from study {0}, A#:{1}, Patient: {2}, ID:{3}", study.StudyInstanceUid, study.AccessionNumber, study.PatientsName, study.PatientId)))
                {
                    StudyXml studyXml = StorageLocation.LoadStudyXml();
                    IDictionary<string, Series> existingSeries = StorageLocation.Study.Series;


                    // Add commands to delete the folders and update the xml
                    foreach (WorkQueueUid uid in WorkQueueUidList)
                    {
                        // Delete from study XML
                        if (studyXml.Contains(uid.SeriesInstanceUid, uid.SopInstanceUid))
                        {
                            // Note: DeleteDirectoryCommand doesn't throw an exception if the folder doesn't exist
                            var xmlUpdate = new RemoveInstanceFromStudyXmlCommand(StorageLocation, studyXml, uid.SeriesInstanceUid, uid.SopInstanceUid);
                            processor.AddCommand(xmlUpdate);
                        }

                        // Delete from filesystem
                        string path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
                        if (File.Exists(path))
                        {
                            var delDir = new FileDeleteCommand(path, true);
                            processor.AddCommand(delDir);
                        }
                    }

                    // flush the updated xml to disk
                    processor.AddCommand(new SaveXmlCommand(studyXml, StorageLocation));


                    // Update the db.. NOTE: these commands are executed at the end.
                    foreach (WorkQueueUid uid in WorkQueueUidList)
                    {
                        // Delete from DB
                        if (studyXml.Contains(uid.SeriesInstanceUid, uid.SopInstanceUid))
                        {
                            var delInstance = new UpdateInstanceCountCommand(StorageLocation, uid.SeriesInstanceUid, uid.SopInstanceUid);
                            processor.AddCommand(delInstance);
                            delInstance.Executing += DeleteSeriesFromDbExecuting;
                        }                       
                        else
                        {
                            // SOP doesn't exist 
                            Platform.Log(LogLevel.Info, "SOP {0} is invalid or no longer exists", uid.SopInstanceUid);
                        }

                        // The WorkQueueUid must be cleared before the entry can be removed from the queue
                        var deleteUid = new DeleteWorkQueueUidCommand(uid);
                        processor.AddCommand(deleteUid);

                        // Force a re-archival if necessary
                        processor.AddCommand(new InsertArchiveQueueCommand(item.ServerPartitionKey, item.StudyStorageKey));
                    }

                    if (!processor.Execute())
                        throw new ApplicationException(
                            String.Format("Error occurred when deleting series from Study {0}, A#: {1}",
                                         study.StudyInstanceUid, study.AccessionNumber), processor.FailureException);                  
                }

                completed = true;
            }
            finally
            {
                if (completed)
                {
                    OnCompleted();
                    PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
                else
                {
                    PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
                }
            }
        }
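The completed flag and finally block at the end of ProcessInstanceLevelDelete implement retry-on-failure: the work-queue item is marked Complete only when every command succeeded; otherwise it stays Pending while the exception propagates. A compact sketch of that shape (hypothetical helpers):

using System;

public class WorkItemSketch
{
	// Hypothetical stand-ins for the PostProcessing outcomes above.
	private void MarkComplete() { Console.WriteLine("Complete"); }
	private void MarkPendingRetry() { Console.WriteLine("Pending: will retry"); }

	public void Process()
	{
		bool completed = false;
		try
		{
			// ... all-or-nothing work ...
			completed = true;    // reached only when every step succeeded
		}
		finally
		{
			if (completed)
				MarkComplete();      // WorkQueueProcessorStatus.Complete
			else
				MarkPendingRetry();  // WorkQueueProcessorStatus.Pending; exception propagates
		}
	}
}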
Example #17
		private DicomProcessingResult HandleDuplicateFile(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomMessageBase message, string sourceFilename, StudyProcessWorkQueueData data)
		{
			Study study = studyLocation.Study ??
						  studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);
			if (study != null)
				Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (A#:{1} StudyUid:{2}  Patient: {3}  ID:{4})",
							 sopInstanceUid,
							 study.AccessionNumber, study.StudyInstanceUid,
							 study.PatientsName, study.PatientId);
			else
				Platform.Log(LogLevel.Info,
							 "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
							 sopInstanceUid, studyLocation.StudyInstanceUid);

			var sopProcessingContext = new SopInstanceProcessorContext(commandProcessor, studyLocation, _context.ContextID,
																_context.Request)
			{
				DuplicateProcessing = _context.DuplicateProcessing
			};
			DicomProcessingResult result = DuplicateSopProcessorHelper.Process(sopProcessingContext, message, data,
			                                                                   sourceFilename);
			return result;
		}
Example #18
		/// <summary>
		/// Apply the Rules engine.
		/// </summary>
		/// <remarks>
		/// <para>
		/// This method applies the rules engine to the first image in each series within a study.
		/// The assumption is that the actions generated by the engine can handle being applied more
		/// than once for the same study.  This is also done to handle the case of multi-modality
		/// studies where you may want the rules to be run against each series, because they may 
		/// apply differently.  
		/// </para>
		/// <para>
		/// Note that we are still applying series level moves, although there currently are not
		/// any series level rules.  We've somewhat turned the study level rules into series
		/// level rules.
		/// </para>
		/// </remarks>
		public void Apply(ServerRuleApplyTimeEnum applyTime)
		{

			using(var theProcessor = new ServerCommandProcessor("Study Rule Processor")
			{
				PrimaryServerPartitionKey = _partition.GetKey(),
				PrimaryStudyKey = _location.Study.GetKey()
			})
			{
                Apply(applyTime, theProcessor);

                if (false == theProcessor.Execute())
                {
                    Platform.Log(LogLevel.Error,
                                 "Unexpected failure processing Study level rules for study {0} on partition {1} for {2} apply time",
                                 _location.StudyInstanceUid, _partition.Description, applyTime.Description);
                }
			}
		}
        private void UpdateStudyOrDuplicates()
        {
            // StorageLocation object must be reloaded if we are overwriting the study
            // with info in the duplicates. 
            bool needReload = false;

            switch (_processDuplicateEntry.QueueData.Action)
            {
                case ProcessDuplicateAction.OverwriteUseDuplicates:

                    if (_processDuplicateEntry.QueueData.State.ExistingStudyUpdated)
                        Platform.Log(LogLevel.Info, "Existing Study has been updated before");
                    else
                    {
                        Platform.Log(LogLevel.Info, "Update Existing Study w/ Duplicate Info");
                        _studyUpdateCommands = BuildUpdateStudyCommandsFromDuplicate();
                        using (ServerCommandProcessor processor = new ServerCommandProcessor("Update Existing Study w/ Duplicate Info"))
                        {
                            processor.AddCommand(new UpdateStudyCommand(ServerPartition, StorageLocation, _studyUpdateCommands, ServerRuleApplyTimeEnum.SopProcessed, WorkQueueItem));
                            if (!processor.Execute())
                            {
                                throw new ApplicationException(processor.FailureReason, processor.FailureException);
                            }

                            needReload = true;
                            _processDuplicateEntry.QueueData.State.ExistingStudyUpdated = true;
                        }
                    }
                    
                    break;
                    
                case ProcessDuplicateAction.OverwriteUseExisting:
                    ImageUpdateCommandBuilder commandBuilder = new ImageUpdateCommandBuilder();
                    _duplicateUpdateCommands = new List<BaseImageLevelUpdateCommand>();
                    _duplicateUpdateCommands.AddRange(commandBuilder.BuildCommands<StudyMatchingMap>(StorageLocation));
                    PrintCommands(_duplicateUpdateCommands);
                    break;
            }

            if (needReload)
            {
                StudyStorageLocation updatedStorageLocation;
                
                // NOTE: Make sure we are loading the storage location from the database instead of the cache.
                if (!FilesystemMonitor.Instance.GetWritableStudyStorageLocation(WorkQueueItem.StudyStorageKey, out updatedStorageLocation))
                {
                    // this is odd.. we just updated it and now it's no longer writable?
                    throw new ApplicationException("Filesystem is not writable");
                }
                StorageLocation = updatedStorageLocation;
            }
        }
		private ProcessDuplicateResult OverwriteAndUpdateDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
		{
			Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

			var result = new ProcessDuplicateResult();
			result.ActionTaken = DuplicateProcessResultAction.Accept;

			using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
			{
				var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
				processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

				// Do this so that the FileSize calculation in InsertStudyXmlCommand works.
				dupFile.Filename = destination;

				// Update the StudyStream object
				var insertStudyXmlCommand = new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation);
				processor.AddCommand(insertStudyXmlCommand);

				// Ideally we don't need to insert the instance into the database since it's a duplicate.
				// However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
				// For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
				// But it will be corrected at the end of the processing when the study verification is executed.
				processor.AddCommand(new UpdateInstanceCommand(Context.StorageLocation.ServerPartition,Context.StorageLocation,dupFile));

				processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

				if (!processor.Execute())
				{
					EventManager.FireEvent(this, new FailedUpdateSopEventArgs { File = dupFile, ServerPartitionEntry = Context.StorageLocation.ServerPartition, WorkQueueUidEntry = uid, WorkQueueEntry = WorkQueueItem, FileLength = (ulong)insertStudyXmlCommand.FileSize, FailureMessage = processor.FailureReason });

					// cause the item to fail
					throw new Exception("Error occurred when trying to overwrite duplicate in the filesystem.", processor.FailureException);
				}

				EventManager.FireEvent(this, new UpdateSopEventArgs { File = dupFile, ServerPartitionEntry = Context.StorageLocation.ServerPartition, WorkQueueUidEntry = uid, WorkQueueEntry = WorkQueueItem, FileLength = (ulong)insertStudyXmlCommand.FileSize });
			}

			return result;
		}
        private void RemoveExistingImage(WorkQueueUid uid)
        {
            string path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (!File.Exists(path))
                return;

            StudyXml studyXml = StorageLocation.LoadStudyXml();
            var file = new DicomFile(path);
            file.Load(DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default); // no need to load pixel data because the file is being deleted

            #if DEBUG
            int originalInstanceCountInXml = studyXml.NumberOfStudyRelatedInstances;
            int originalStudyInstanceCount = Study.NumberOfStudyRelatedInstances;
            int originalSeriesInstanceCount = Study.Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances;
            #endif

            using (var processor = new ServerCommandProcessor("Delete Existing Image"))
            {
                var seriesInstanceUid = file.DataSet[DicomTags.SeriesInstanceUid].ToString();
                var sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].ToString();

                processor.AddCommand(new FileDeleteCommand(path, true));
                processor.AddCommand(new RemoveInstanceFromStudyXmlCommand(StorageLocation, studyXml, seriesInstanceUid, sopInstanceUid));
                processor.AddCommand(new UpdateInstanceCountCommand(StorageLocation, seriesInstanceUid, sopInstanceUid));

                if (!processor.Execute())
                {
                    throw new ApplicationException(String.Format("Unable to remove existing image {0}", file.Filename), processor.FailureException);
                }
            }

            #if DEBUG
            Debug.Assert(!File.Exists(path));
            Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCountInXml - 1);
            Debug.Assert(Study.Load(Study.Key).NumberOfStudyRelatedInstances == originalStudyInstanceCount - 1);
            Debug.Assert(Study.Load(Study.Key).Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances == originalSeriesInstanceCount - 1);
            #endif
        }
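        // The #if DEBUG block above captures counts before the commands run and asserts the
        // expected deltas afterwards; the checks compile away in release builds. The bare
        // pattern, with hypothetical helpers, looks like this:
        private void DoWorkWithInvariantCheck()
        {
            #if DEBUG
            int before = CountInstances();   // capture pre-state (hypothetical helper)
            #endif

            RemoveOneInstance();             // the operation being verified (hypothetical)

            #if DEBUG
            Debug.Assert(CountInstances() == before - 1, "Exactly one instance should have been removed");
            #endif
        }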
Example #22
        private static void OnStudyRestored(StudyStorageLocation location)
        {
            ValidateStudy(location);

            using(var processor = new ServerCommandProcessor("Update Study Size In DB"))
            {
                processor.AddCommand(new UpdateStudySizeInDBCommand(location));
                if (!processor.Execute())
                {
                    Platform.Log(LogLevel.Error, "Unexpected error when trying to update the study size in DB:", processor.FailureReason);
                }
            }
        }
        void SaveDuplicateReport(WorkQueueUid uid, string sourceFile, string destinationFile, DicomFile dupFile, StudyXml studyXml)
        {
            using (var processor = new ServerCommandProcessor("Save duplicate report"))
            {
                processor.AddCommand(new RenameFileCommand(sourceFile, destinationFile, false));

                // Update the StudyStream object
                processor.AddCommand( new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!processor.Execute())
                    Platform.Log(LogLevel.Error, "Unexpected failure saving duplicate report: {0}", processor.FailureReason);
            }
        }
        /// <summary>
        /// Migrates the study to a new tier.
        /// </summary>
        /// <param name="storage">The current storage location of the study.</param>
        /// <param name="newFilesystem">The filesystem the study is being migrated to.</param>
        private void DoMigrateStudy(StudyStorageLocation storage, ServerFilesystemInfo newFilesystem)
        {
            Platform.CheckForNullReference(storage, "storage");
            Platform.CheckForNullReference(newFilesystem, "newFilesystem");

            TierMigrationStatistics stat = new TierMigrationStatistics {StudyInstanceUid = storage.StudyInstanceUid};
        	stat.ProcessSpeed.Start();
    	    StudyXml studyXml = storage.LoadStudyXml();
            stat.StudySize = (ulong) studyXml.GetStudySize(); 

            Platform.Log(LogLevel.Info, "About to migrate study {0} from {1} to {2}", 
                    storage.StudyInstanceUid, storage.FilesystemTierEnum, newFilesystem.Filesystem.Description);
			
            string newPath = Path.Combine(newFilesystem.Filesystem.FilesystemPath, storage.PartitionFolder);
    	    DateTime startTime = Platform.Time;
            DateTime lastLog = Platform.Time;
    	    int fileCounter = 0;
    	    ulong bytesCopied = 0;
    	    long instanceCountInXml = studyXml.NumberOfStudyRelatedInstances;
            
            using (ServerCommandProcessor processor = new ServerCommandProcessor("Migrate Study"))
            {
                TierMigrationContext context = new TierMigrationContext
                                               	{
                                               		OriginalStudyLocation = storage,
                                               		Destination = newFilesystem
                                               	};

				// Multiple CreateDirectoryCommands are used so that rollback of the directories
				// being created happens properly if any of them already exists.
				var origFolder = context.OriginalStudyLocation.GetStudyPath();
                processor.AddCommand(new CreateDirectoryCommand(newPath));

                newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyFolder);
                processor.AddCommand(new CreateDirectoryCommand(newPath));

                newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyInstanceUid);
                // don't create this directory so that it won't be backed up by MoveDirectoryCommand

				var copyDirCommand = new CopyDirectoryCommand(origFolder, newPath, 
                    delegate (string path)
                        {
                            // Update the progress. This is useful if the migration takes a long time to complete.

                            FileInfo file = new FileInfo(path);
                            bytesCopied += (ulong)file.Length;
                            fileCounter++;
                            if (file.Extension != null && file.Extension.Equals(ServerPlatform.DicomFileExtension, StringComparison.InvariantCultureIgnoreCase))
                            {
                                TimeSpan elapsed = Platform.Time - lastLog;
                                TimeSpan totalElapsed = Platform.Time - startTime;
                                double speedInMBPerSecond = 0;
                                if (totalElapsed.TotalSeconds > 0)
                                {
                                    speedInMBPerSecond = (bytesCopied / 1024f / 1024f) / totalElapsed.TotalSeconds;
                                }

                                if (elapsed > TimeSpan.FromSeconds(WorkQueueSettings.Instance.TierMigrationProgressUpdateInSeconds))
                                {
                                    #region Log Progress

                                    StringBuilder stats = new StringBuilder();
                                    if (instanceCountInXml != 0)
                                    {
                                        float pct = (float)fileCounter / instanceCountInXml;
                                        stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2} ({3:0}% completed). Speed={4:0.00}MB/s",
                                                    fileCounter, bytesCopied / 1024f / 1024f, startTime, pct * 100, speedInMBPerSecond);
                                    }
                                    else
                                    {
                                        stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2}. Speed={3:0.00}MB/s",
                                                    fileCounter, bytesCopied / 1024f / 1024f, startTime, speedInMBPerSecond);

                                    }

                                    Platform.Log(LogLevel.Info, "Tier migration for study {0}: {1}", storage.StudyInstanceUid, stats.ToString());
                                    try
                                    {
                                        using (IUpdateContext ctx = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                                        {
                                            IWorkQueueEntityBroker broker = ctx.GetBroker<IWorkQueueEntityBroker>();
                                            WorkQueueUpdateColumns parameters = new WorkQueueUpdateColumns
                                                                                	{FailureDescription = stats.ToString()};
                                        	broker.Update(WorkQueueItem.GetKey(), parameters);
                                            ctx.Commit();
                                        }
                                    }
                                    catch
                                    {
                                        // The progress could not be persisted; ignore and continue copying.
                                    }
                                    finally
                                    {
                                        lastLog = Platform.Time;
                                    }
                                    #endregion
                                }
                            }
                        });
                processor.AddCommand(copyDirCommand);

                DeleteDirectoryCommand delDirCommand = new DeleteDirectoryCommand(origFolder, false)
                                                       	{RequiresRollback = false};
            	processor.AddCommand(delDirCommand);
                
                TierMigrateDatabaseUpdateCommand updateDbCommand = new TierMigrateDatabaseUpdateCommand(context);
                processor.AddCommand(updateDbCommand);

                Platform.Log(LogLevel.Info, "Start migrating study {0}.. expecting {1} to be moved", storage.StudyInstanceUid, ByteCountFormatter.Format(stat.StudySize));
                if (!processor.Execute())
                {
                	if (processor.FailureException != null)
                        throw processor.FailureException;
                	throw new ApplicationException(processor.FailureReason);
                }

            	stat.DBUpdate = updateDbCommand.Statistics;
                stat.CopyFiles = copyDirCommand.CopySpeed;
                stat.DeleteDirTime = delDirCommand.Statistics;
            }

            stat.ProcessSpeed.SetData(bytesCopied);
            stat.ProcessSpeed.End();

            Platform.Log(LogLevel.Info, "Successfully migrated study {0} from {1} to {2} in {3} [ {4} files, {5} @ {6}, DB Update={7}, Remove Dir={8}]",
                            storage.StudyInstanceUid, 
                            storage.FilesystemTierEnum,
                            newFilesystem.Filesystem.FilesystemTierEnum,
                            TimeSpanFormatter.Format(stat.ProcessSpeed.ElapsedTime), 
                            fileCounter,
                            ByteCountFormatter.Format(bytesCopied), 
                            stat.CopyFiles.FormattedValue,
                            stat.DBUpdate.FormattedValue,
                            stat.DeleteDirTime.FormattedValue);

    	    string originalPath = storage.GetStudyPath();
            if (Directory.Exists(originalPath))
            {
                Platform.Log(LogLevel.Info, "Original study folder could not be deleted. It must be cleaned up manually: {0}", originalPath);
                ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Warning, WorkQueueItem.WorkQueueTypeEnum.ToString(), 1000, GetWorkQueueContextData(WorkQueueItem), TimeSpan.Zero,
                    "Study has been migrated to a new tier. Original study folder must be cleaned up manually: {0}", originalPath);
            }

            UpdateAverageStatistics(stat);
            
        }
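        // The copy delegate above recomputes statistics on every file but only performs the
        // expensive work (logging and the WorkQueue row update) once per
        // TierMigrationProgressUpdateInSeconds. The throttling technique in isolation, as a
        // sketch that uses only the Platform calls shown in this example:
        private DateTime _lastProgressLog = DateTime.MinValue;

        private void ReportProgress(int filesMoved, ulong bytesMoved, TimeSpan minInterval)
        {
            // Cheap bookkeeping happens on every call; logging is rate-limited.
            if (Platform.Time - _lastProgressLog < minInterval)
                return;

            Platform.Log(LogLevel.Info, "{0} files moved [{1:0.0}MB]",
                         filesMoved, bytesMoved / 1024f / 1024f);
            _lastProgressLog = Platform.Time;
        }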
        void CreateDuplicateSIQEntry(WorkQueueUid uid, DicomFile file, List<DicomAttributeComparisonResult> differences)
        {
            Platform.Log(LogLevel.Info, "Duplicate SOP is different from existing copy. Creating duplicate SIQ entry. SOP: {0}", uid.SopInstanceUid);

            using (var processor = new ServerCommandProcessor("Create Duplicate SIQ Entry"))
            {
                var insertCommand = new InsertOrUpdateEntryCommand(
                    uid.GroupID, StorageLocation, file,
                    ServerHelper.GetDuplicateGroupPath(StorageLocation, uid),
                    string.IsNullOrEmpty(uid.RelativePath)
                        ? Path.Combine(StorageLocation.StudyInstanceUid, uid.SopInstanceUid + "." + uid.Extension)
                        : uid.RelativePath,
                    differences);
                processor.AddCommand(insertCommand);

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!processor.Execute())
                    Platform.Log(LogLevel.Error, "Unexpected failure inserting duplicate SIQ entry: {0}", processor.FailureReason);
            }
        }
Example #26
		private void SaveFile(DicomFile file)
		{
			String seriesInstanceUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
			String sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);

			String destPath = _oldStudyLocation.FilesystemPath;
			
			using (ServerCommandProcessor filesystemUpdateProcessor = new ServerCommandProcessor("Update Study"))
			{
				filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath));

				destPath = Path.Combine(destPath, _partition.PartitionFolder);
				filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath));

				destPath = Path.Combine(destPath, _oldStudyFolder);
				filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath));

				destPath = Path.Combine(destPath, _newStudyInstanceUid);
				filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath));

				destPath = Path.Combine(destPath, seriesInstanceUid);
				filesystemUpdateProcessor.AddCommand(new CreateDirectoryCommand(destPath));

				destPath = Path.Combine(destPath, sopInstanceUid);
				destPath += ServerPlatform.DicomFileExtension;

				// Overwrite the prior file
				SaveDicomFileCommand saveCommand = new SaveDicomFileCommand(destPath, file, false);
				filesystemUpdateProcessor.AddCommand(saveCommand);

				if (_rulesEngine != null)
				{
					ServerActionContext context = new ServerActionContext(file, _oldStudyLocation.FilesystemKey, _partition, _oldStudyLocation.Key, filesystemUpdateProcessor);
					_rulesEngine.Execute(context); 
				}

				if (!filesystemUpdateProcessor.Execute())
				{
					throw new ApplicationException(String.Format("Unable to update image {0} : {1}", file.Filename, filesystemUpdateProcessor.FailureReason));
				}
			}            
		}
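		// SaveFile above builds the destination path one segment at a time and adds a
		// CreateDirectoryCommand per level. As noted in the tier-migration example, this keeps
		// rollback correct: each command removes only a directory it actually created, so
		// pre-existing levels survive a rollback. A condensed sketch of the pattern
		// (rootPath and the segment names are illustrative):
		private static string CreateNestedFolders(ServerCommandProcessor processor, string rootPath, params string[] segments)
		{
			string path = rootPath;
			foreach (string segment in segments)
			{
				path = Path.Combine(path, segment);
				processor.AddCommand(new CreateDirectoryCommand(path)); // one rollback unit per level
			}
			return path;
		}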
		/// <summary>
		/// Process a specific DICOM file related to a <see cref="WorkQueue"/> request.
		/// </summary>
		/// <remarks>
		/// <para>
		/// On success, and if <paramref name="uid"/> is set, the <see cref="WorkQueueUid"/> entry is deleted.
		/// </para>
		/// </remarks>
		/// <param name="group">The group the SOP instance is associated with.</param>
		/// <param name="file">The file to process.</param>
		/// <param name="stream">The <see cref="StudyXml"/> file to update with information from the file.</param>
		/// <param name="compare">Flag to compare the demographics of <paramref name="file"/> with the demographics in the database.</param>
		/// <param name="retry">Flag telling if the item should be retried on failure. Note that if the item is a duplicate, the WorkQueueUid item is not failed.</param>
		/// <param name="uid">An optional WorkQueueUid associated with the entry, which will be deleted upon success or failed on failure.</param>
		/// <param name="deleteFile">An optional file to delete as part of the processing.</param>
		/// <param name="sopType">Flag telling if the SOP is a new or updated SOP.</param>
		/// <exception cref="Exception"/>
		/// <exception cref="DicomDataException"/>
		public ProcessingResult ProcessFile(string group, DicomFile file, StudyXml stream, bool compare, bool retry, WorkQueueUid uid, string deleteFile, SopInstanceProcessorSopType sopType)
		{
		    Platform.CheckForNullReference(file, "file");

            try
            {
                CheckDataLength(file);

                _instanceStats.ProcessTime.Start();
                ProcessingResult result = new ProcessingResult
                                              {
                                                  Status = ProcessingStatus.Success
                                              };

                using (ServerCommandProcessor processor = new ServerCommandProcessor("Process File"))
                {
                    SopInstanceProcessorContext processingContext = new SopInstanceProcessorContext(processor,
                                                                                      _context.StorageLocation, group);

                    if (EnforceNameRules)
                    {
                        _patientNameRules.Apply(file);
                    }

                    if (compare && ShouldReconcile(_context.StorageLocation, file))
                    {
                        ScheduleReconcile(processingContext, file, uid);
                        result.Status = ProcessingStatus.Reconciled;
                    }
                    else
                    {
                        InsertInstance(file, stream, uid, deleteFile,sopType);
                        result.Status = ProcessingStatus.Success;
                    }
                }

                _instanceStats.ProcessTime.End();

                if (_context.SopProcessedRulesEngine.Statistics.LoadTime.IsSet)
                    _instanceStats.SopRulesLoadTime.Add(_context.SopProcessedRulesEngine.Statistics.LoadTime);

                if (_context.SopProcessedRulesEngine.Statistics.ExecutionTime.IsSet)
                    _instanceStats.SopEngineExecutionTime.Add(_context.SopProcessedRulesEngine.Statistics.ExecutionTime);

                _context.SopProcessedRulesEngine.Statistics.Reset();

                //TODO: Should we throw an exception if the result indicates failure?
                return result;

            }
            catch (Exception e)
            {
                // If it's a duplicate, don't fail the UID; just rethrow the exception
                if (deleteFile != null && (e is InstanceAlreadyExistsException
                        || e.InnerException is InstanceAlreadyExistsException))
                    throw;

                if (uid != null)
                    FailUid(uid, retry);
                throw;
            }
		}
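		// A hedged usage sketch for ProcessFile covering the common "new SOP" case. The
		// SopInstanceProcessorSopType.NewSop member is an assumption; every other name comes
		// from the code and doc comment above:
		private void ProcessNewSopExample(DicomFile file, StudyXml studyXml, WorkQueueUid workQueueUid)
		{
			ProcessingResult result = ProcessFile(
				null,          // group: no duplicate group
				file,
				studyXml,      // the study's StudyXml, loaded elsewhere
				true,          // compare demographics against the database
				true,          // retry the uid on failure
				workQueueUid,  // may be null when not driven by a WorkQueue entry
				null,          // no extra file to delete
				SopInstanceProcessorSopType.NewSop); // assumed enum member

			if (result.Status == ProcessingStatus.Reconciled)
				Platform.Log(LogLevel.Info, "SOP was scheduled for reconciliation instead of insertion");
		}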
Example #28
		 public bool GetStreamedFileStorageFolder(DicomMessageBase message, out string folder, out string filesystemStreamingFolder)
		 {
			 var location = GetWritableOnlineStorage(message);

			 using (
				 var commandProcessor =
					 new ServerCommandProcessor(String.Format("Streaming folder for Study Instance {0}", location.StudyInstanceUid)))
			 {

				 String path = Path.Combine(location.FilesystemPath, location.PartitionFolder);
				 commandProcessor.AddCommand(new CreateDirectoryCommand(path));

				 filesystemStreamingFolder = path = Path.Combine(path, ServerPlatform.StreamingStorageFolder);
				 commandProcessor.AddCommand(new CreateDirectoryCommand(path));

				 path = Path.Combine(path, _context.ContextID /* the AE title + timestamp */);
				 commandProcessor.AddCommand(new CreateDirectoryCommand(path));

				 path = Path.Combine(path, location.StudyInstanceUid);
				 commandProcessor.AddCommand(new CreateDirectoryCommand(path));

				 if (!commandProcessor.Execute())
				 {
					 folder = null;
					 Platform.Log(LogLevel.Warn, "Unable to create directory to store study: {0}: {1}", path,
					              commandProcessor.FailureReason);
					 return false;
				 }
				 folder = path;
				 return true;
			 }
		 }
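		 // GetStreamedFileStorageFolder reports failure through its return value rather than
		 // an exception, so callers are expected to branch on the result. A small sketch
		 // (the message comes from the caller's context):
		 private void StoreStreamedFileExample(DicomMessageBase message)
		 {
			 string folder, streamingFolder;
			 if (!GetStreamedFileStorageFolder(message, out folder, out streamingFolder))
			 {
				 // Could not create the streaming folders; reject or requeue the message instead.
				 return;
			 }

			 Platform.Log(LogLevel.Debug, "Streaming into {0} (streaming root: {1})", folder, streamingFolder);
		 }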
        /// <summary>
        /// Move the file specified in the path to the incoming folder so that it will be imported again
        /// </summary>
        /// <param name="path"></param>
        private void MoveFileToIncomingFolder(string path)
        {
            Platform.Log(LogLevel.Debug, "Moving file {0} to incoming folder", path);

            // Do not proceed if the file is missing: it may indicate an incomplete study.
            if (!File.Exists(path))
                throw new FileNotFoundException(string.Format("File is missing: {0}", path));
            
            // Move the file to the Incoming folder so it will be reprocessed
            using (var processor = new ServerCommandProcessor("Move file back to incoming folder"))
            {
                var fileInfo = new FileInfo(path);
                var incomingPath = GetServerPartitionIncomingFolder();
                incomingPath = Path.Combine(incomingPath, "FromWorkQueue");
                incomingPath = Path.Combine(incomingPath, StorageLocation.StudyInstanceUid);

                var createDirCommand = new CreateDirectoryCommand(incomingPath);
                processor.AddCommand(createDirCommand);

                incomingPath = Path.Combine(incomingPath, fileInfo.Name);
                var move = new RenameFileCommand(path, incomingPath, true);
                processor.AddCommand(move);

                if (!processor.Execute())
                {
                    throw new Exception("Unexpected error when trying to move the file back to the incoming folder for reprocessing", processor.FailureException);
                }

                Platform.Log(LogLevel.Info, "File {0} has been moved to the incoming folder.", path);
            }
        }
Example #30
        /// <summary>
        /// Imports the specified <see cref="DicomMessageBase"/> object into the system.
        /// The object will be inserted into the <see cref="WorkQueue"/> for processing. If it is
        /// a duplicate, appropriate checks are performed and, depending on the policy, it will be
        /// ignored, rejected, or inserted into the <see cref="StudyIntegrityQueue"/> for manual intervention.
        /// </summary>
        /// <param name="message">The DICOM object to be imported.</param>
        /// <returns>An instance of <see cref="DicomProcessingResult"/> that describes the result of the processing.</returns>
        /// <exception cref="DicomDataException">Thrown when the DICOM object contains invalid data</exception>
        public DicomProcessingResult Import(DicomMessageBase message)
        {
            Platform.CheckForNullReference(message, "message");
            String studyInstanceUid;
            String seriesInstanceUid;
            String sopInstanceUid;
            String accessionNumber;
            String patientsName;

	        LoadMessageUids(message, out studyInstanceUid, out seriesInstanceUid, out sopInstanceUid,
	                             out accessionNumber, out patientsName);

        	DicomFile file = null;

            // Scrub the name for invalid characters.
            string newName = XmlUtils.XmlCharacterScrub(patientsName);
            if (!newName.Equals(patientsName))
                message.DataSet[DicomTags.PatientsName].SetStringValue(newName);

			var result = new DicomProcessingResult
			                               	{
			                               		Successful = true,
			                               		StudyInstanceUid = studyInstanceUid,
			                               		SeriesInstanceUid = seriesInstanceUid,
			                               		SopInstanceUid = sopInstanceUid,
			                               		AccessionNumber = accessionNumber
			                               	};

        	try
			{
				Validate(message);
			}
			catch (DicomDataException e)
			{
				result.SetError(DicomStatuses.ProcessingFailure, e.Message);
				return result;
			}

            // Use the command processor for rollback capabilities.
            using (var commandProcessor = new ServerCommandProcessor(String.Format("Processing Sop Instance {0}", sopInstanceUid)))
            {
                try
                {
                    string failureMessage;
                    StudyStorageLocation studyLocation = GetWritableOnlineStorage(message);

                    // GetWritableOnlineStorage should throw an exception if the study location cannot be found.
                    Platform.CheckForNullReference(studyLocation, "studyLocation");

                    if (!studyLocation.QueueStudyStateEnum.Equals(QueueStudyStateEnum.Idle)
                        && (!studyLocation.QueueStudyStateEnum.Equals(QueueStudyStateEnum.ProcessingScheduled)))
                    {
                        failureMessage = String.Format("Study {0} on partition {1} is being processed: {2}, can't accept new images.",
                                                       studyLocation.StudyInstanceUid, _context.Partition.Description, studyLocation.QueueStudyStateEnum.Description);
                        result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage);
                        return result;
                    }
                	if (studyLocation.StudyStatusEnum.Equals(StudyStatusEnum.OnlineLossy))
                	{
                		if (studyLocation.IsLatestArchiveLossless)
                		{
                			result.DicomStatus = DicomStatuses.StorageStorageOutOfResources;
                			failureMessage = String.Format("Study {0} on partition {1} can't accept new images due to lossy compression of the study.  Restoring study.",
                			                               studyLocation.StudyInstanceUid, _context.Partition.Description);
                			Platform.Log(LogLevel.Error, failureMessage);
                			if (ServerHelper.InsertRestoreRequest(studyLocation) == null)
                			{
                				Platform.Log(LogLevel.Warn, "Unable to insert Restore Request for Study");
                			}

                			result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage);
                            result.RestoreRequested = true;
                			return result;
                		}
                	}

                	String path = studyLocation.FilesystemPath;
                    String finalDest = studyLocation.GetSopInstancePath(seriesInstanceUid, sopInstanceUid);
                    file = ConvertToDicomFile(message, finalDest, _context.SourceAE);

                    if (HasUnprocessedCopy(studyLocation.Key, seriesInstanceUid, sopInstanceUid))
                    {
                        var accept = false;

                        // This is a special case: #10569
                        // Allow user to revive an orphaned study by reprocessing the files found in the filesystem
                        if (File.Exists(finalDest))
                        {
                            accept = DuplicatePolicy.IsParitionDuplicatePolicyOverridden(studyLocation);
                        }
                        
                        if (!accept)
                        {
                            failureMessage = string.Format("Another copy of the SOP Instance was received but has not been processed: {0}", sopInstanceUid);
                            result.SetError(DicomStatuses.DuplicateSOPInstance, failureMessage);
                            return result;
                        }
                    }

                    var data = new StudyProcessWorkQueueData
                        {
                            ReceivingAeTitle = _context.AlternateAe == null
                                                  ? _context.Partition.AeTitle
                                                  : _context.AlternateAe.AeTitle
                        };

                	if (File.Exists(finalDest))
                	{
                		result = HandleDuplicate(sopInstanceUid, studyLocation, commandProcessor, file, data);
                		if (!result.Successful)
                			return result;
                	}
                	else
                	{
                	    HandleNonDuplicate(seriesInstanceUid, sopInstanceUid, studyLocation, commandProcessor, file, path,
                	                       false, data);
                	}

                	if (commandProcessor.Execute())
                	{
                		result.DicomStatus = DicomStatuses.Success;
                	}
                	else
                	{
                		failureMessage =
                			String.Format("Failure processing message: {0}. Sending failure status.",
                			              commandProcessor.FailureReason);
                		result.SetError(DicomStatuses.ProcessingFailure, failureMessage);
                		// processor already rolled back
                		return result;
                	}
                }
                catch(NoWritableFilesystemException)
                {
                    String failureMessage = String.Format("Unable to process image, no writable filesystem found for Study UID {0}.", sopInstanceUid);
                    commandProcessor.Rollback();
                    result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage);
                }
                catch(StudyIsNearlineException e)
                {
                    String failureMessage = e.RestoreRequested
                                                ? String.Format("{0}. Restore has been requested.", e.Message)
                                                : e.Message;

                    Platform.Log(LogLevel.Error, failureMessage);
                    commandProcessor.Rollback();
                    result.SetError(DicomStatuses.ProcessingFailure, failureMessage);
                }
                catch (FilesystemNotWritableException)
                {
					commandProcessor.Rollback();

                	string folder;
					if (!FilesystemMonitor.Instance.GetWriteableIncomingFolder(_context.Partition, out folder))
					{
						String failureMessage =
							String.Format("Unable to process image, study storage location is missing or not writeable: {0}.", sopInstanceUid);
						result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage);
						return result;
					}

                	if (file == null)
                		file = ConvertToDicomFile(message, string.Empty, _context.SourceAE);

                	if (!SaveToFolder(folder, sopInstanceUid, studyInstanceUid, file))
                	{
						String failureMessage =
							String.Format("Study storage location not writeable and no writeable incoming folder: {0}.", sopInstanceUid);
						result.SetError(DicomStatuses.StorageStorageOutOfResources, failureMessage);
						return result;
                	}

                	Platform.Log(LogLevel.Info, "Saved existing SOP without writeable storage location to {0} folder: {1}",
                	             FilesystemMonitor.ImportDirectorySuffix, sopInstanceUid);
					result.DicomStatus = DicomStatuses.Success;
                	return result;
                }
                catch (Exception e)
                {
                    Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}.  Rolling back operation.", commandProcessor.Description);
                    commandProcessor.Rollback();
                    result.SetError(result.DicomStatus ?? DicomStatuses.ProcessingFailure, e.Message);
                }
            }

            return result;
        }
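        // A hedged sketch of driving Import and reacting to the result. The containing class
        // is assumed to be named SopInstanceImporter; how it is constructed is context-specific
        // and not shown in this example:
        private static void ImportExample(SopInstanceImporter importer, DicomMessageBase message)
        {
            DicomProcessingResult result = importer.Import(message);

            if (result.Successful)
            {
                // result.DicomStatus is DicomStatuses.Success on the happy path.
            }
            else if (result.RestoreRequested)
            {
                // The study is lossy-compressed with a lossless archive; a restore was queued.
                Platform.Log(LogLevel.Warn, "Restore requested for study {0}", result.StudyInstanceUid);
            }
            else
            {
                Platform.Log(LogLevel.Error, "Import failed with status {0}", result.DicomStatus);
            }
        }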