Example #1
        public static WorkQueueUid Load(IPersistenceContext read, ServerEntityKey key)
        {
            var broker = read.GetBroker<IWorkQueueUidEntityBroker>();
            WorkQueueUid theObject = broker.Load(key);

            return theObject;
        }
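A minimal usage sketch for the Load helper above. It assumes the persistent store exposes an OpenReadContext()/IReadContext counterpart to the OpenUpdateContext() call used in the other examples; the wrapper name and key argument are illustrative.

        // Hypothetical wrapper: load a WorkQueueUid by key inside a read-only context.
        public static WorkQueueUid TryLoad(ServerEntityKey key)
        {
            using (IReadContext read = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
            {
                return Load(read, key);
            }
        }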
Example #2
        public static WorkQueueUid Insert(WorkQueueUid entity)
        {
            using (var update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
                WorkQueueUid newEntity = Insert(update, entity);
                update.Commit();
                return newEntity;
            }
        }
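A hypothetical caller-side sketch for this convenience overload: populate a new WorkQueueUid (the properties mirror the update columns shown in Example #4) and insert it in one self-committing call. The parent WorkQueue item and UID values are assumed to come from the surrounding processing context.

        // Hypothetical helper: queue a received SOP instance for processing.
        static WorkQueueUid QueueSopForProcessing(Model.WorkQueue workQueueItem, string seriesUid, string sopUid)
        {
            var uid = new WorkQueueUid
            {
                WorkQueueKey = workQueueItem.GetKey(), // link to the parent WorkQueue entry
                SeriesInstanceUid = seriesUid,
                SopInstanceUid = sopUid,
                Failed = false,
                Duplicate = false,
                FailureCount = 0
            };
            return Insert(uid); // opens its own update context and commits
        }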
        private bool ProcessWorkQueueUid(Model.WorkQueue item, WorkQueueUid sop, StudyXml studyXml, IDicomCodecFactory theCodecFactory)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(sop, "sop");
            Platform.CheckForNullReference(studyXml, "studyXml");

            if (!studyXml.Contains(sop.SeriesInstanceUid, sop.SopInstanceUid))
            {
                // The UID was inserted but is not in the study xml.
                // Auto-recovery might have detected a problem with that file and removed it from the study.
                // Assume the study xml has been corrected and ignore the uid.
                Platform.Log(LogLevel.Warn, "Skipping SOP {0} in series {1}. It is no longer part of the study.", sop.SopInstanceUid, sop.SeriesInstanceUid);

                // Delete it out of the queue
                DeleteWorkQueueUid(sop);
                return true;
            }

            string basePath = Path.Combine(StorageLocation.GetStudyPath(), sop.SeriesInstanceUid);
            basePath = Path.Combine(basePath, sop.SopInstanceUid);
            string path;
            if (sop.Extension != null)
                path = basePath + "." + sop.Extension;
            else
                path = basePath + ServerPlatform.DicomFileExtension;

            try
            {
                ProcessFile(item, sop, path, studyXml, theCodecFactory);

                // WorkQueueUid has been deleted out by the processor

                return true;
            }
            catch (Exception e)
            {
                if (e.InnerException is DicomCodecUnsupportedSopException)
                {
                    Platform.Log(LogLevel.Warn, e, "Instance not supported for compressor: {0}.  Deleting WorkQueue entry for SOP {1}", e.Message, sop.SopInstanceUid);

                    item.FailureDescription = e.InnerException.Message;

                    // Delete it out of the queue
                    DeleteWorkQueueUid(sop);

                    return false;
                }
                Platform.Log(LogLevel.Error, e, "Unexpected exception when compressing file: {0} SOP Instance: {1}", path, sop.SopInstanceUid);
                item.FailureDescription = e.InnerException != null ? e.InnerException.Message : e.Message;

                sop.FailureCount++;

                UpdateWorkQueueUid(sop);

                return false;
            }
        }
Example #4
        public static WorkQueueUid Insert(IUpdateContext update, WorkQueueUid entity)
        {
            var broker = update.GetBroker<IWorkQueueUidEntityBroker>();
            var updateColumns = new WorkQueueUidUpdateColumns();

            updateColumns.WorkQueueKey = entity.WorkQueueKey;
            updateColumns.Failed = entity.Failed;
            updateColumns.Duplicate = entity.Duplicate;
            updateColumns.FailureCount = entity.FailureCount;
            updateColumns.GroupID = entity.GroupID;
            updateColumns.RelativePath = entity.RelativePath;
            updateColumns.Extension = entity.Extension;
            updateColumns.SeriesInstanceUid = entity.SeriesInstanceUid;
            updateColumns.SopInstanceUid = entity.SopInstanceUid;
            WorkQueueUid newEntity = broker.Insert(updateColumns);

            return newEntity;
        }
		/// <summary>
		/// Process a specific DICOM file related to a <see cref="WorkQueue"/> request.
		/// </summary>
		/// <remarks>
		/// <para>
		/// On success, and if <paramref name="uid"/> is set, the <see cref="WorkQueueUid"/> entry is deleted.
		/// </para>
		/// </remarks>
		/// <param name="stream">The <see cref="StudyXml"/> file to update with information from the file.</param>
		/// <param name="group">The group the SOP is associated with.</param>
		/// <param name="file">The file to process.</param>
		/// <param name="compare">Flag indicating whether to compare the demographics of <paramref name="file"/> with the demographics in the database.</param>
		/// <param name="retry">Flag indicating whether the item should be retried on failure. Note that if the item is a duplicate, the WorkQueueUid item is not failed.</param>
		/// <param name="uid">An optional WorkQueueUid associated with the entry, that will be deleted upon success or failed on failure.</param>
		/// <param name="deleteFile">An optional file to delete as part of the process.</param>
		/// <param name="sopType">Flag indicating whether the SOP is a new or updated SOP.</param>
		/// <exception cref="Exception"/>
		/// <exception cref="DicomDataException"/>
		public ProcessingResult ProcessFile(string group, DicomFile file, StudyXml stream, bool compare, bool retry, WorkQueueUid uid, string deleteFile, SopInstanceProcessorSopType sopType)
		{
		    Platform.CheckForNullReference(file, "file");

            try
            {
                CheckDataLength(file);

                _instanceStats.ProcessTime.Start();
                ProcessingResult result = new ProcessingResult
                                              {
                                                  Status = ProcessingStatus.Success
                                              };

                using (ServerCommandProcessor processor = new ServerCommandProcessor("Process File"))
                {
                    SopInstanceProcessorContext processingContext = new SopInstanceProcessorContext(processor,
                                                                                      _context.StorageLocation, group);

                    if (EnforceNameRules)
                    {
                        _patientNameRules.Apply(file);
                    }

                    if (compare && ShouldReconcile(_context.StorageLocation, file))
                    {
                        ScheduleReconcile(processingContext, file, uid);
                        result.Status = ProcessingStatus.Reconciled;
                    }
                    else
                    {
                        InsertInstance(file, stream, uid, deleteFile,sopType);
                        result.Status = ProcessingStatus.Success;
                    }
                }

                _instanceStats.ProcessTime.End();

                if (_context.SopProcessedRulesEngine.Statistics.LoadTime.IsSet)
                    _instanceStats.SopRulesLoadTime.Add(_context.SopProcessedRulesEngine.Statistics.LoadTime);

                if (_context.SopProcessedRulesEngine.Statistics.ExecutionTime.IsSet)
                    _instanceStats.SopEngineExecutionTime.Add(_context.SopProcessedRulesEngine.Statistics.ExecutionTime);

                _context.SopProcessedRulesEngine.Statistics.Reset();

                //TODO: Should throw exception if result is failed?
                return result;

            }
            catch (Exception e)
            {
                // If it's a duplicate and the instance already exists, rethrow without
                // failing the WorkQueueUid; the caller handles this case.
                if (deleteFile != null && (e is InstanceAlreadyExistsException
                        || e.InnerException is InstanceAlreadyExistsException))
                    throw;

                if (uid != null)
                    FailUid(uid, retry);
                throw;
            }
		}
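A hedged call sketch for ProcessFile, mirroring the real invocations that appear in AddDuplicateToStudy and the ProcessFile(WorkQueueUid, ...) override later on this page; the group, file, studyXml, and uid values are assumed to come from the enclosing WorkQueue processing code.

		// Sketch: process a newly received SOP; on success the WorkQueueUid is
		// deleted by the processor, on error it is failed (with retry).
		ProcessingResult result = processor.ProcessFile(
			group,                               // UID group for the SOP
			file,                                // DICOM file loaded from disk
			studyXml,                            // study stream to update
			true,                                // compare demographics against the database
			true,                                // retry the uid on failure
			uid,                                 // associated WorkQueueUid entry
			null,                                // no extra file to delete
			SopInstanceProcessorSopType.NewSop);

		if (result.Status == ProcessingStatus.Reconciled)
			Platform.Log(LogLevel.Info, "SOP was diverted for reconciliation rather than processed.");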
        /// <summary>
        /// Called after the specified <see cref="WorkQueueUid"/> has been processed
        /// </summary>
        /// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
        /// <param name="uid">The <see cref="WorkQueueUid"/> being processed</param>
        protected virtual void OnProcessUidEnd(Model.WorkQueue item, WorkQueueUid uid)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(uid, "uid");

            if (uid.Duplicate)
            {
                String dupPath = ServerHelper.GetDuplicateUidPath(StorageLocation, uid);
                // Delete the container if it's empty
                var f = new FileInfo(dupPath);

                if (f.Directory!=null && DirectoryUtility.DeleteIfEmpty(f.Directory.FullName))
                {
                    DirectoryUtility.DeleteIfEmpty(ServerHelper.GetDuplicateGroupPath(StorageLocation, uid));
                }
            }
        }
        private static void RemoveWorkQueueUid(WorkQueueUid uid, string fileToDelete)
        {
            using (var processor = new ServerCommandProcessor("Remove Work Queue Uid"))
            {
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
                if (String.IsNullOrEmpty(fileToDelete) == false)
                {
                    processor.AddCommand(new FileDeleteCommand(fileToDelete, true));

                }

                if (!processor.Execute())
                {
                    String error = String.Format("Unable to delete Work Queue Uid {0}: {1}", uid.Key, processor.FailureReason);
                    Platform.Log(LogLevel.Error, error);
                    throw new ApplicationException(error, processor.FailureException);
                }
            }

        }
        /// <summary>
        /// Process the specified <see cref="WorkQueueUid"/>.
        /// </summary>
        /// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
        /// <param name="sop">The <see cref="WorkQueueUid"/> being processed</param>
        /// <param name="studyXml">The <see cref="StudyXml"/> object for the study being processed</param>
        /// <returns>true if the <see cref="WorkQueueUid"/> is successfully processed. false otherwise</returns>
        protected virtual bool ProcessWorkQueueUid(Model.WorkQueue item, WorkQueueUid sop, StudyXml studyXml)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(sop, "sop");
            Platform.CheckForNullReference(studyXml, "studyXml");

            OnProcessUidBegin(item, sop);

            string path = null;
            
            try
            {
                if (sop.Duplicate && sop.Extension != null)
                {
                    path = ServerHelper.GetDuplicateUidPath(StorageLocation, sop);
                    var file = new DicomFile(path);
                    file.Load();

                    InstancePreProcessingResult result = PreProcessFile(sop, file);

                    if (!file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid)
                            || result.DiscardImage)
                    {
                        RemoveWorkQueueUid(sop, null);
                    }
                    else 
                    {
                        var duplicateResult = ProcessDuplicate(file, sop, studyXml);
                        if (duplicateResult.ActionTaken == DuplicateProcessResultAction.Delete || duplicateResult.ActionTaken == DuplicateProcessResultAction.Accept)
                        {
                            // make sure the folder is also deleted if it's empty
                            string folder = Path.GetDirectoryName(path);

                            String reconcileRootFolder = ServerHelper.GetDuplicateFolderRootPath(StorageLocation);
                            DirectoryUtility.DeleteIfEmpty(folder, reconcileRootFolder);
                        }
                    }
                }
                else
                {
                    try
                    {
                        path = StorageLocation.GetSopInstancePath(sop.SeriesInstanceUid, sop.SopInstanceUid);
                        var file = new DicomFile(path);
                        file.Load();

                        InstancePreProcessingResult result = PreProcessFile(sop, file);

                        if (!file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid)
                            || result.DiscardImage)
                        {
                            RemoveWorkQueueUid(sop, path);
                        }
                        else
                        {
                            ProcessFile(sop, file, studyXml, !result.AutoReconciled);
                        }
                    }
                    catch (DicomException ex)
                    {
                        // bad file. Remove it from the filesystem and the queue
                        RemoveBadDicomFile(path, ex.Message);
                        DeleteWorkQueueUid(sop);
                        return false;
                    }
                    
                }
                
                return true;
            }
            catch (StudyIsNearlineException)
            {
                // handled by caller
                throw;
            }
            catch (Exception e)
            {
                Platform.Log(LogLevel.Error, e, "Unexpected exception when processing file: {0} SOP Instance: {1}", path, sop.SopInstanceUid);
                item.FailureDescription = e.InnerException != null
                    ? String.Format("{0}:{1}", e.GetType().Name, e.InnerException.Message)
                    : String.Format("{0}:{1}", e.GetType().Name, e.Message);

                // No longer needed; the update was moved into the SopInstanceProcessor.
                //sop.FailureCount++;
                //UpdateWorkQueueUid(sop);
                return false;
            }
            finally
            {
                OnProcessUidEnd(item, sop);
            }            
        }
        void CreateDuplicateSIQEntry(WorkQueueUid uid, DicomFile file, List<DicomAttributeComparisonResult> differences)
        {
            Platform.Log(LogLevel.Info, "Duplicate SOP is different from existing copy. Creating duplicate SIQ entry. SOP: {0}", uid.SopInstanceUid);

            using (var processor = new ServerCommandProcessor("Create Duplicate SIQ Entry"))
            {
                var insertCommand = new InsertOrUpdateEntryCommand(
                    uid.GroupID, StorageLocation, file,
                    ServerHelper.GetDuplicateGroupPath(StorageLocation, uid),
                    string.IsNullOrEmpty(uid.RelativePath)
                        ? Path.Combine(StorageLocation.StudyInstanceUid, uid.SopInstanceUid + "." + uid.Extension)
                        : uid.RelativePath,
                    differences);
                processor.AddCommand(insertCommand);

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                processor.Execute();
            }
            
        }
		private ProcessDuplicateResult OverwriteAndUpdateDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
		{
			Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

			var result = new ProcessDuplicateResult();
			result.ActionTaken = DuplicateProcessResultAction.Accept;

			using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
			{
				var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
				processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

				// Set the filename so that the FileSize calculation in InsertStudyXmlCommand works
				dupFile.Filename = destination;

				// Update the StudyStream object
				var insertStudyXmlCommand = new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation);
				processor.AddCommand(insertStudyXmlCommand);

				// Ideally we don't need to insert the instance into the database since it's a duplicate.
				// However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
				// For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
				// But it will be corrected at the end of the processing when the study verification is executed.
				processor.AddCommand(new UpdateInstanceCommand(Context.StorageLocation.ServerPartition,Context.StorageLocation,dupFile));

				processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

				if (!processor.Execute())
				{
					EventManager.FireEvent(this, new FailedUpdateSopEventArgs { File = dupFile, ServerPartitionEntry = Context.StorageLocation.ServerPartition, WorkQueueUidEntry = uid, WorkQueueEntry = WorkQueueItem, FileLength = (ulong)insertStudyXmlCommand.FileSize, FailureMessage = processor.FailureReason });

					// cause the item to fail
					throw new Exception("Error occurred when trying to overwrite duplicate in the filesystem.", processor.FailureException);
				}

				EventManager.FireEvent(this, new UpdateSopEventArgs { File = dupFile, ServerPartitionEntry = Context.StorageLocation.ServerPartition, WorkQueueUidEntry = uid, WorkQueueEntry = WorkQueueItem, FileLength = (ulong)insertStudyXmlCommand.FileSize });
			}

			return result;
		}
        void SaveDuplicateReport(WorkQueueUid uid, string sourceFile, string destinationFile, DicomFile dupFile, StudyXml studyXml)
        {
            using (var processor = new ServerCommandProcessor("Save duplicate report"))
            {
                processor.AddCommand(new RenameFileCommand(sourceFile, destinationFile, false));

                // Update the StudyStream object
                processor.AddCommand( new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                processor.Execute();
            }
        }
        private void DeleteDuplicate(WorkQueueUid uid)
        {
            using (var processor = new ServerCommandProcessor("Delete Received Duplicate"))
            {
                FileInfo duplicateFile = GetDuplicateSopFile(uid);
                processor.AddCommand(new FileDeleteCommand(duplicateFile.FullName, true));
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
                if (!processor.Execute())
                {
                    throw new ApplicationException(processor.FailureReason, processor.FailureException);
                }
                Platform.Log(ServerPlatform.InstanceLogLevel, "Discard duplicate SOP {0} in {1}", uid.SopInstanceUid, duplicateFile.FullName);
            }
        }
        private void ProcessUid(WorkQueueUid uid)
        {
            switch(_processDuplicateEntry.QueueData.Action)
            {
                case ProcessDuplicateAction.Delete:
                    DeleteDuplicate(uid);
                    break;

                case ProcessDuplicateAction.OverwriteUseDuplicates:
                    OverwriteExistingInstance(uid, ProcessDuplicateAction.OverwriteUseDuplicates);
                    break;

                case ProcessDuplicateAction.OverwriteUseExisting:
                    OverwriteExistingInstance(uid, ProcessDuplicateAction.OverwriteUseExisting);
                    break;

                case ProcessDuplicateAction.OverwriteAsIs:
                    OverwriteExistingInstance(uid, ProcessDuplicateAction.OverwriteAsIs);
                    break;

                default:
                    throw new NotSupportedException(
                        String.Format("Not supported action: {0}", _processDuplicateEntry.QueueData.Action));
            }
        }
Example #14
		/// <summary>
		/// Schedules a reconciliation for the specified <see cref="DicomFile"/>.
		/// </summary>
		/// <param name="context">The processing context for the SOP instance.</param>
		/// <param name="file">The file to be reconciled.</param>
		/// <param name="uid">An optional <see cref="WorkQueueUid"/> to delete once the reconciliation is scheduled.</param>
		private static void ScheduleReconcile(SopInstanceProcessorContext context, DicomFile file, WorkQueueUid uid)
		{
			ImageReconciler reconciler = new ImageReconciler(context);
			reconciler.ScheduleReconcile(file, StudyIntegrityReasonEnum.InconsistentData, uid);
		}
Example #16
        private bool ExistsInStudy(WorkQueueUid uid)
        {
            String path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
            if (File.Exists(path))
                return true;

            // check the study xml
            StudyXml studyXml = StorageLocation.LoadStudyXml();
            return studyXml[uid.SeriesInstanceUid] != null &&
                   studyXml[uid.SeriesInstanceUid][uid.SopInstanceUid] != null;
        }
        private ProcessDuplicateResult ProcessDuplicateReport(DicomFile dupFile, DicomFile baseFile, WorkQueueUid uid, StudyXml studyXml)
        {
            var result = new ProcessDuplicateResult();

            DateTime? dupTime = DateTimeParser.ParseDateAndTime(dupFile.DataSet, 0, DicomTags.InstanceCreationDate,
                                                                DicomTags.InstanceCreationTime);

            DateTime? baseTime = DateTimeParser.ParseDateAndTime(baseFile.DataSet, 0, DicomTags.InstanceCreationDate,
                                                                 DicomTags.InstanceCreationTime);

            if (dupTime.HasValue && baseTime.HasValue)
            {
                if (dupTime.Value <= baseTime.Value)
                {
                    RemoveWorkQueueUid(uid, dupFile.Filename);
                    result.ActionTaken = DuplicateProcessResultAction.Delete;
                    return result;
                }
            }

            result.ActionTaken = DuplicateProcessResultAction.Accept;
            SaveDuplicateReport(uid, dupFile.Filename, baseFile.Filename, dupFile, studyXml);
            return result;
        }
        private void OverwriteExistingInstance(WorkQueueUid uid, ProcessDuplicateAction action)
        {
            if (ExistsInStudy(uid))
            {
                // remove the existing image and update the count
                RemoveExistingImage(uid);
            }

            DicomFile duplicateDicomFile = LoadDuplicateDicomFile(uid, false);
            PreprocessDuplicate(duplicateDicomFile, action);
            AddDuplicateToStudy(duplicateDicomFile, uid, action);
        }
		private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
		{
			var result = new ProcessDuplicateResult();

			var data = uid.SerializeWorkQueueUidData;

			string duplicateSopPath = ServerHelper.GetDuplicateUidPath(StorageLocation, uid);
			string basePath = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
			if (!File.Exists(basePath))
			{
				// NOTE: This is special case. The file which caused dicom service to think this sop is a duplicate
				// no longer exists in the study folder. Perhaps it has been moved to another folder during auto reconciliation.
				// We have nothing to compare against so let's just throw it into the SIQ queue.
				CreateDuplicateSIQEntry(uid, dupFile, null);
				result.ActionTaken = DuplicateProcessResultAction.Reconcile;
			}
			else
			{
				var duplicateEnum = data.DuplicateProcessing ?? DuplicateProcessingEnum.Compare;

				// Check if system is configured to override the rule for this study
				if (duplicateEnum == DuplicateProcessingEnum.OverwriteSop)
				{
					return OverwriteDuplicate(dupFile, uid, studyXml);
				}

				// Check if system is configured to override the rule for this study
				if (duplicateEnum == DuplicateProcessingEnum.OverwriteSopAndUpdateDatabase)
				{
					return OverwriteAndUpdateDuplicate(dupFile, uid, studyXml);
				}

				var baseFile = new DicomFile(basePath);
				baseFile.Load();

				if (duplicateEnum == DuplicateProcessingEnum.OverwriteReport)
				{
					return ProcessDuplicateReport(dupFile, baseFile, uid, studyXml);
				}

				// DuplicateProcessingEnum.Compare
				if (!dupFile.TransferSyntax.Equals(baseFile.TransferSyntax))
				{
					// If they're compressed, and we have a codec, lets decompress and still do the comparison
					if (dupFile.TransferSyntax.Encapsulated
					    && !dupFile.TransferSyntax.LossyCompressed
					    && DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null)
					{
						dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
					}

					if (baseFile.TransferSyntax.Encapsulated
					    && !baseFile.TransferSyntax.LossyCompressed
					    && DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null)
					{
						baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
					}

					if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
					{
						string failure = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
						                               baseFile.TransferSyntax, dupFile.TransferSyntax);

						var list = new List<DicomAttributeComparisonResult>();
						var compareResult = new DicomAttributeComparisonResult
							{
								ResultType = ComparisonResultType.DifferentValues,
								TagName = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
								Details = failure
							};
						list.Add(compareResult);
						CreateDuplicateSIQEntry(uid, dupFile, list);
						result.ActionTaken = DuplicateProcessResultAction.Reconcile;
						return result;
					}
				}

				var failureReason = new List<DicomAttributeComparisonResult>();
				if (baseFile.DataSet.Equals(dupFile.DataSet, ref failureReason))
				{
					Platform.Log(LogLevel.Info,
					             "Duplicate SOP being processed is identical.  Removing SOP: {0}",
					             baseFile.MediaStorageSopInstanceUid);

					RemoveWorkQueueUid(uid, duplicateSopPath);
					result.ActionTaken = DuplicateProcessResultAction.Delete;
				}
				else
				{
					CreateDuplicateSIQEntry(uid, dupFile, failureReason);
					result.ActionTaken = DuplicateProcessResultAction.Reconcile;
				}
			}

			return result;
		}
        private void AddDuplicateToStudy(DicomFile duplicateDicomFile, WorkQueueUid uid, ProcessDuplicateAction action)
        {
            
            var context = new StudyProcessorContext(StorageLocation, WorkQueueItem);
            var sopInstanceProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };
            string group = uid.GroupID ?? ServerHelper.GetUidGroup(duplicateDicomFile, ServerPartition, WorkQueueItem.InsertTime);

            StudyXml studyXml = StorageLocation.LoadStudyXml();
            int originalInstanceCount = studyXml.NumberOfStudyRelatedInstances;

            bool compare = action != ProcessDuplicateAction.OverwriteAsIs;
            // NOTE: "compare" has no effect for OverwriteUseExisting or OverwriteUseDuplicate
            // because in both cases, the study and the duplicates are modified to be the same.
            ProcessingResult result = sopInstanceProcessor.ProcessFile(group, duplicateDicomFile, studyXml, compare, true, uid, duplicateDicomFile.Filename, SopInstanceProcessorSopType.UpdatedSop);
            if (result.Status == ProcessingStatus.Reconciled)
            {
                throw new ApplicationException("Unexpected status of Reconciled image in duplicate handling!");
            }

            Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCount + 1);
            Debug.Assert(File.Exists(StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid)));

        }
        /// <summary>
        /// Process a saved DICOM file related to a <see cref="WorkQueue"/> request.
        /// </summary>
        /// <param name="queueUid">The <see cref="WorkQueueUid"/> entry being processed.</param>
        /// <param name="stream">The <see cref="StudyXml"/> file to update with information from the file.</param>
        /// <param name="file">The file being processed.</param>
        /// <param name="compare">Indicates whether to compare the DICOM file against the study in the system.</param>
        protected virtual void ProcessFile(WorkQueueUid queueUid, DicomFile file, StudyXml stream, bool compare)
        {
            var processor = new SopInstanceProcessor(Context) {EnforceNameRules = true};

        	var fileInfo = new FileInfo(file.Filename);
			long fileSize = fileInfo.Length;

			processor.InstanceStats.FileLoadTime.Start();
			processor.InstanceStats.FileLoadTime.End();
			processor.InstanceStats.FileSize = (ulong)fileSize;
			string sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, "File:" + fileInfo.Name);
			processor.InstanceStats.Description = sopInstanceUid;

            string group = queueUid.GroupID ?? ServerHelper.GetUidGroup(file, ServerPartition, WorkQueueItem.InsertTime);

            ProcessingResult result = processor.ProcessFile(group, file, stream, compare, true, queueUid, null, SopInstanceProcessorSopType.NewSop);

            if (result.Status == ProcessingStatus.Reconciled)
            {
                // The file has been saved by the SopInstanceProcessor elsewhere for reconciliation.
                // Note: the SopInstanceProcessor has removed the WorkQueueUid, so we
                // only need to delete the file here.
                FileUtils.Delete(fileInfo.FullName);
            }
			
			Statistics.StudyInstanceUid = StorageLocation.StudyInstanceUid;
			if (String.IsNullOrEmpty(processor.Modality) == false)
				Statistics.Modality = processor.Modality;

			// Update the statistics
			Statistics.NumInstances++;
        	Statistics.AddSubStats(processor.InstanceStats);
        }
        private void RemoveExistingImage(WorkQueueUid uid)
        {
            string path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (!File.Exists(path))
                return;

            StudyXml studyXml = StorageLocation.LoadStudyXml();
            var file = new DicomFile(path);
            file.Load(DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default); // no need to load pixel data because we will delete the file

            #if DEBUG
            int originalInstanceCountInXml = studyXml.NumberOfStudyRelatedInstances;
            int originalStudyInstanceCount = Study.NumberOfStudyRelatedInstances;
            int originalSeriesInstanceCount = Study.Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances;
            #endif

            using (var processor = new ServerCommandProcessor("Delete Existing Image"))
            {
                var seriesInstanceUid = file.DataSet[DicomTags.SeriesInstanceUid].ToString();
                var sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].ToString();

                processor.AddCommand(new FileDeleteCommand(path,true));
                processor.AddCommand(new RemoveInstanceFromStudyXmlCommand(StorageLocation, studyXml, seriesInstanceUid, sopInstanceUid));
                processor.AddCommand(new UpdateInstanceCountCommand(StorageLocation, seriesInstanceUid,sopInstanceUid));

                if (!processor.Execute())
                {
                    throw new ApplicationException(String.Format("Unable to remove existing image {0}", file.Filename), processor.FailureException);
                }
            }

            #if DEBUG
            Debug.Assert(!File.Exists(path));
            Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCountInXml - 1);
            Debug.Assert(Study.Load(Study.Key).NumberOfStudyRelatedInstances == originalStudyInstanceCount - 1);
            Debug.Assert(Study.Load(Study.Key).Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances == originalSeriesInstanceCount - 1);
            #endif
        }
        /// <summary>
        /// Apply changes to the file prior to processing it.
        /// </summary>
        /// <param name="uid"></param>
        /// <param name="file"></param>
        protected virtual InstancePreProcessingResult PreProcessFile(WorkQueueUid uid, DicomFile file)
        {
            String contextID = uid.GroupID ?? String.Format("{0}_{1}",
                String.IsNullOrEmpty(file.SourceApplicationEntityTitle) ? ServerPartition.AeTitle : file.SourceApplicationEntityTitle, 
                WorkQueueItem.InsertTime.ToString("yyyyMMddHHmmss"));

            var result = new InstancePreProcessingResult();
            
            var patientNameRules = new PatientNameRules(Study);
            UpdateItem updateItem = patientNameRules.Apply(file);

            result.Modified = updateItem != null;

            var autoBaseReconciler = new AutoReconciler(contextID, StorageLocation);
            InstancePreProcessingResult reconcileResult = autoBaseReconciler.Process(file);
            result.AutoReconciled = reconcileResult != null;
            result.Modified |= reconcileResult != null;
            
            if (reconcileResult!=null && reconcileResult.DiscardImage)
            {
                result.DiscardImage = true;
            }

            // If the study UID was modified, the file will be deleted by the caller.
            if (file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid))
            {
                if (result.Modified)
                    file.Save();
            }

            
            return result;
        }
        private DicomFile LoadDuplicateDicomFile(WorkQueueUid uid, bool skipPixelData)
        {
            FileInfo duplicateFile = GetDuplicateSopFile(uid);
            Platform.CheckTrue(duplicateFile.Exists, String.Format("Duplicate SOP doesn't exist at {0}", uid.SopInstanceUid));
            DicomFile file = new DicomFile(duplicateFile.FullName);

            file.Load(skipPixelData ? DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default : DicomReadOptions.Default);
            return file;
        }
        /// <summary>
        /// Called before the specified <see cref="WorkQueueUid"/> is processed
        /// </summary>
        /// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
        /// <param name="uid">The <see cref="WorkQueueUid"/> being processed</param>
        protected virtual void OnProcessUidBegin(Model.WorkQueue item, WorkQueueUid uid)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(uid, "uid");

        }
        private FileInfo GetDuplicateSopFile(WorkQueueUid uid)
        {
            string path = DuplicateFolder;

            if (string.IsNullOrEmpty(uid.RelativePath))
            {
                path = Path.Combine(path, StorageLocation.StudyInstanceUid);
                path = Path.Combine(path, uid.SopInstanceUid + "." + uid.Extension);
            }
            else path = Path.Combine(path, uid.RelativePath);

            return new FileInfo(path);
        }
Example #28
        /// <summary>
        /// Inserts a <see cref="StudyIntegrityQueue"/> entry for manual reconciliation.
        /// </summary>
        /// <param name="file">The DICOM file that needs to be reconciled.</param>
        /// <param name="reason">The type of <see cref="StudyIntegrityQueue"/> entry to be inserted.</param>
        /// <param name="uid">A UID to delete on insert.</param>
        /// <remarks>
        /// A copy of the DICOM file will be stored in a special folder allocated for 
        /// reconciliation purpose. The caller is responsible for managing the original copy.
        /// </remarks>
		public void ScheduleReconcile(DicomFile file, StudyIntegrityReasonEnum reason, WorkQueueUid uid)
		{
            Platform.CheckForNullReference(_context.StudyLocation, "_context.StudyLocation");
          
            Platform.Log(LogLevel.Info, "Scheduling new manual reconciliation for SOP {0}", file.MediaStorageSopInstanceUid);
            ServerFilesystemInfo fs = FilesystemMonitor.Instance.GetFilesystemInfo(_context.StudyLocation.FilesystemKey);
            Platform.CheckForNullReference(fs, "fs");
            
            ReconcileStorage reconcileStorage = new ReconcileStorage(_context.StudyLocation, _context.Group); 

            using(ServerCommandProcessor processor = new ServerCommandProcessor("Schedule Manual Reconciliation"))
            {
            	string path = reconcileStorage.GetSopInstancePath(file.DataSet[DicomTags.SopInstanceUid].ToString());
                DirectoryInfo dir = new DirectoryInfo(path);
				if (dir.Parent != null)
				{
					CreateDirectoryCommand mkdir = new CreateDirectoryCommand(dir.Parent.FullName);
					processor.AddCommand(mkdir);
				}

            	SaveDicomFileCommand saveFileCommand = new SaveDicomFileCommand(path, file, true);
                processor.AddCommand(saveFileCommand);

                InsertSIQCommand updateStudyCommand = new InsertSIQCommand(_context.StudyLocation, reason, file, _context.Group, reconcileStorage);
                processor.AddCommand(updateStudyCommand);

				if (uid != null)
					processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (processor.Execute() == false)
                {
                    throw new ApplicationException(String.Format("Unable to schedule image reconcilation : {0}", processor.FailureReason), processor.FailureException);
                }
            }
		}
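A caller-side sketch of the contract described in the remarks: ScheduleReconcile saves its own copy of the file into the reconcile folder, so the caller still owns the original. Here the original is removed afterwards, as the ProcessFile example above does with FileUtils.Delete; the reconciler instance is an ImageReconciler, as in the wrapper shown under Example #14.

		// Sketch: schedule the reconcile, then dispose of the caller's copy.
		reconciler.ScheduleReconcile(file, StudyIntegrityReasonEnum.InconsistentData, uid);
		FileUtils.Delete(file.Filename); // reconcile storage now holds its own copy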
Example #29
    	/// <summary>
        /// Delete an entry in the <see cref="WorkQueueUid"/> table.
        /// </summary>
        /// <param name="sop">The <see cref="WorkQueueUid"/> entry to delete.</param>
        protected virtual void DeleteWorkQueueUid(WorkQueueUid sop)
        {
            // Must retry in case of a db error.
            // Failure to do so may lead to orphaned WorkQueueUid entries and a FileNotFoundException
            // when the work queue is reset.
    	    int retryCount = 0;
            while (true)
            {
                try
                {
                    TimeSpanStatistics time = TimeSpanStatisticsHelper.Measure(
                        delegate
                            {
                                using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                                {
                                    IWorkQueueUidEntityBroker delete = updateContext.GetBroker<IWorkQueueUidEntityBroker>();

                                    delete.Delete(sop.GetKey());
                                    updateContext.Commit();
                                }
                        });

                    DBUpdateTime.Add(time);
                    break; // done
                }
                catch (Exception ex)
                {
                    if (ex is PersistenceException || ex is SqlException)
                    {
                        if (retryCount > MAX_DB_RETRY)
                        {
                            Platform.Log(LogLevel.Error, ex, "Error occurred when calling DeleteWorkQueueUid. Max db retry count has been reached.");
                            WorkQueueItem.FailureDescription = String.Format("Error occurred when calling DeleteWorkQueueUid. Max db retry count has been reached.");
                            PostProcessingFailure(WorkQueueItem, WorkQueueProcessorFailureType.Fatal);
                            return;
                        }

                        Platform.Log(LogLevel.Error, ex, "Error occurred when calling DeleteWorkQueueUid(). Retry later. SOPUID={0}", sop.SopInstanceUid);
                        SleepForRetry();

                        // Service is stopping
                        if (CancelPending)
                        {
                            Platform.Log(LogLevel.Warn, "Termination Requested. DeleteWorkQueueUid() is now terminated.");
                            break;
                        }
                        retryCount++;
                    }
                    else
                        throw;
                }
            }

        }
		public static void FailUid(WorkQueueUid sop, bool retry)
		{
			using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
			{
				IWorkQueueUidEntityBroker uidUpdateBroker = updateContext.GetBroker<IWorkQueueUidEntityBroker>();
				WorkQueueUidUpdateColumns columns = new WorkQueueUidUpdateColumns();
				if (!retry)
					columns.Failed = true;
				else
				{
					if (sop.FailureCount >= ImageServerCommonConfiguration.WorkQueueMaxFailureCount)
					{
						columns.Failed = true;
					}
					else
					{
						columns.FailureCount = ++sop.FailureCount;
					}
				}

				uidUpdateBroker.Update(sop.GetKey(), columns);
				updateContext.Commit();
			}
		}
Example #31
        /// <summary>
        /// Update an entry in the <see cref="WorkQueueUid"/> table.
        /// </summary>
        /// <remarks>
        /// Note that just the Duplicate, Failed, FailureCount, and Extension columns are updated from the
        /// input parameter <paramref name="sop"/>.
        /// </remarks>
        /// <param name="sop">The <see cref="WorkQueueUid"/> entry to update.</param>
        protected virtual void UpdateWorkQueueUid(WorkQueueUid sop)
        {
            DBUpdateTime.Add(
                TimeSpanStatisticsHelper.Measure(
                delegate
                {
                     using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                     {
                         IWorkQueueUidEntityBroker update = updateContext.GetBroker<IWorkQueueUidEntityBroker>();

                         WorkQueueUidUpdateColumns columns = new WorkQueueUidUpdateColumns
                                                                 {
                                                                     Duplicate = sop.Duplicate,
                                                                     Failed = sop.Failed,
                                                                     FailureCount = sop.FailureCount
                                                                 };

                         if (sop.Extension != null)
                             columns.Extension = sop.Extension;

                         update.Update(sop.GetKey(), columns);

                         updateContext.Commit();
                     }
                }));
        }
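A short sketch of the intended update pattern, matching the compression example near the top of this page: mutate the in-memory entity, then persist it; per the remarks, only the Duplicate, Failed, FailureCount, and Extension columns are written.

            // Sketch: record another failure and persist the updatable columns.
            sop.FailureCount++;
            UpdateWorkQueueUid(sop);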
		private void InsertInstance(DicomFile file, StudyXml stream, WorkQueueUid uid, string deleteFile, SopInstanceProcessorSopType sopType)
		{
			using (var processor = new ServerCommandProcessor("Processing WorkQueue DICOM file"))
			{
			    EventsHelper.Fire(OnInsertingSop, this, new SopInsertingEventArgs {Processor = processor });

				InsertInstanceCommand insertInstanceCommand = null;
				InsertStudyXmlCommand insertStudyXmlCommand = null;

				String patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, String.Empty);
				_modality = file.DataSet[DicomTags.Modality].GetString(0, String.Empty);

				if (_context.UpdateCommands.Count > 0)
				{
					foreach (BaseImageLevelUpdateCommand command in _context.UpdateCommands)
					{
						command.File = file;
						processor.AddCommand(command);
					}
				}
				try
				{
					// Create a context for applying actions from the rules engine
					ServerActionContext context =
						new ServerActionContext(file, _context.StorageLocation.FilesystemKey, _context.Partition, _context.StorageLocation.Key);
					context.CommandProcessor = processor;

					_context.SopCompressionRulesEngine.Execute(context);
                    String seriesUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
                    String sopUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);
                    String finalDest = _context.StorageLocation.GetSopInstancePath(seriesUid, sopUid);

					if (_context.UpdateCommands.Count > 0)
					{
						processor.AddCommand(new SaveDicomFileCommand(_context.StorageLocation, file, file.Filename != finalDest));
					}
					else if (file.Filename != finalDest || processor.CommandCount > 0)
                    {
						// Have to be careful here about failing vs. not failing when the destination
						// already exists, because of the different use cases of the importer.
						// Save the file in the study folder, or at the final destination if it's been compressed.
						processor.AddCommand(new SaveDicomFileCommand(finalDest, file, file.Filename != finalDest));
                    }

					// Update the StudyStream object
					insertStudyXmlCommand = new InsertStudyXmlCommand(file, stream, _context.StorageLocation);
					processor.AddCommand(insertStudyXmlCommand);

					// Have the rules applied during the command processor, and add the objects.
					processor.AddCommand(new ApplySopRulesCommand(context,_context.SopProcessedRulesEngine));

					// If specified, delete the file
					if (deleteFile != null)
						processor.AddCommand(new FileDeleteCommand(deleteFile, true));

					// Insert into the database, but only if it's not a duplicate, so the counts don't get thrown off
					insertInstanceCommand = new InsertInstanceCommand(file, _context.StorageLocation);
					processor.AddCommand(insertInstanceCommand);
					
					// Do a check if the StudyStatus value should be changed in the StorageLocation.  This
					// should only occur if the object has been compressed in the previous steps.
					processor.AddCommand(new UpdateStudyStatusCommand(_context.StorageLocation, file));

					if (uid!=null)
						processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

					// Do the actual processing
					if (!processor.Execute())
					{
						Platform.Log(LogLevel.Error, "Failure processing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid);
						Platform.Log(LogLevel.Error, "File that failed processing: {0}", file.Filename);
						throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException);
					}
					Platform.Log(ServerPlatform.InstanceLogLevel, "Processed SOP: {0} for Patient {1}", file.MediaStorageSopInstanceUid, patientsName);

					// Fire NewSopEventArgs or UpdateSopEventArgs event.
					// We know it's a duplicate if we have to delete the duplicate object.
					if (sopType == SopInstanceProcessorSopType.NewSop)
						EventManager.FireEvent(this, new NewSopEventArgs { File = file, ServerPartitionEntry = _context.Partition, WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize });
					else if (sopType == SopInstanceProcessorSopType.UpdatedSop)
						EventManager.FireEvent(this, new UpdateSopEventArgs {File = file,ServerPartitionEntry = _context.Partition,WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize});
				}
				catch (Exception e)
				{
					Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}.  Rolling back operation.",
					             processor.Description);
					processor.Rollback();
					if (sopType == SopInstanceProcessorSopType.NewSop)
						EventManager.FireEvent(this, new FailedNewSopEventArgs { File = file, ServerPartitionEntry = _context.Partition, WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize, FailureMessage = e.Message });
					else
						EventManager.FireEvent(this, new FailedUpdateSopEventArgs { File = file, ServerPartitionEntry = _context.Partition, WorkQueueUidEntry = uid, WorkQueueEntry = _context.WorkQueueEntry, FileLength = InstanceStats.FileSize, FailureMessage = e.Message });
					throw new ApplicationException("Unexpected exception when processing file.", e);
				}
				finally
				{
					if (insertInstanceCommand != null && insertInstanceCommand.Statistics.IsSet)
						_instanceStats.InsertDBTime.Add(insertInstanceCommand.Statistics);
					if (insertStudyXmlCommand != null && insertStudyXmlCommand.Statistics.IsSet)
						_instanceStats.InsertStreamTime.Add(insertStudyXmlCommand.Statistics);
				}
			}
		}
Example #33
        /// <summary>
        /// Routine for failing a work queue uid record.
        /// </summary>
        /// <param name="uid">The WorkQueueUid record to fail.</param>
        /// <param name="retry">A boolean value indicating whether a retry will be attempted later.</param>
        protected void FailWorkQueueUid(WorkQueueUid uid, bool retry)
        {
            using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
                IWorkQueueUidEntityBroker uidUpdateBroker = updateContext.GetBroker<IWorkQueueUidEntityBroker>();
                WorkQueueUidUpdateColumns columns = new WorkQueueUidUpdateColumns();
                if (!retry)
                    columns.Failed = true;
                else
                {
                    if (uid.FailureCount >= ImageServerCommonConfiguration.WorkQueueMaxFailureCount)
                    {
                        columns.Failed = true;
                    }
                    else
                    {
                        columns.FailureCount = ++uid.FailureCount;
                    }
                }

                if (uidUpdateBroker.Update(uid.GetKey(), columns))
                    updateContext.Commit();
                else
                    throw new ApplicationException(String.Format("FailUid(): Unable to update work queue uid {0}", uid.Key));
            }
        }