Example #1
        public void Insert(Guid Guid, Guid WorkQueueGUID, string SeriesInstanceUid, string SopInstanceUid, bool Failed,
                           bool Duplicate, string Extension, short FailureCount, string GroupID, string RelativePath)
        {
            var item = new WorkQueueUid();

            item.Guid = Guid;
            item.WorkQueueGUID = WorkQueueGUID;
            item.SeriesInstanceUid = SeriesInstanceUid;
            item.SopInstanceUid = SopInstanceUid;
            item.Failed = Failed;
            item.Duplicate = Duplicate;
            item.Extension = Extension;
            item.FailureCount = FailureCount;
            item.GroupID = GroupID;
            item.RelativePath = RelativePath;

            item.Save(UserName);
        }
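
        // A minimal calling sketch (hypothetical call site: workQueueItem, seriesUid, sopUid, groupId and
        // relativePath are assumed to be in scope, and GetKey().Key is assumed to hold the row GUID, as seen
        // in CreateDuplicateSIQEntry below). Each received instance gets its own WorkQueueUid row keyed to
        // its parent WorkQueue entry:
        //
        //     Insert(Guid.NewGuid(), (Guid)workQueueItem.GetKey().Key, seriesUid, sopUid,
        //            false /* Failed */, true /* Duplicate */, "dcm" /* Extension, hypothetical */,
        //            0 /* FailureCount */, groupId, relativePath);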
Example #2
        private void ProcessUid(WorkQueueUid uid)
        {
            Platform.CheckForNullReference(uid, "uid");

            string imagePath = GetUidPath(uid);

            using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid)))
            {
                // If the file for some reason doesn't exist, we just ignore it
                if (File.Exists(imagePath))
                {
                    Platform.Log(ServerPlatform.InstanceLogLevel, "Deleting {0}", imagePath);
                    FileDeleteCommand deleteFile = new FileDeleteCommand(imagePath, true);
                    processor.AddCommand(deleteFile);
                }
                else
                {
                    Platform.Log(LogLevel.Warn, "WARNING {0} is missing.", imagePath);
                }

                DeleteWorkQueueUidCommand deleteUid = new DeleteWorkQueueUidCommand(uid);
                processor.AddCommand(deleteUid);
                if (!processor.Execute())
                {
                    throw new Exception(String.Format("Unable to delete image {0}", uid.SopInstanceUid));
                }
            }
        }
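
        // The ServerCommandProcessor seen above (and throughout these examples) batches filesystem and
        // database commands so they execute, and on failure roll back, as a unit. A minimal sketch of the
        // recurring call pattern, using only members these examples already exercise; the helper name is
        // hypothetical:
        private static void ExecuteOrThrow(ServerCommandProcessor processor)
        {
            if (!processor.Execute())
                throw new ApplicationException(processor.FailureReason, processor.FailureException);
        }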
Example #3
        /// <summary>
        /// Helper method to return the path to the duplicate image (in the Reconcile folder)
        /// </summary>
        /// <param name="studyStorage"></param>
        /// <param name="sop"></param>
        /// <returns></returns>
        public static String GetDuplicateUidPath(StudyStorageLocation studyStorage, WorkQueueUid sop)
        {
            string dupPath = GetDuplicateGroupPath(studyStorage, sop);

            dupPath = string.IsNullOrEmpty(sop.RelativePath)
                        ? Path.Combine(dupPath,
                                       Path.Combine(studyStorage.StudyInstanceUid, sop.SopInstanceUid + "." + sop.Extension))
                        : Path.Combine(dupPath, sop.RelativePath);

            #region BACKWARD_COMPATIBILITY_CODE

            if (string.IsNullOrEmpty(sop.RelativePath) && !File.Exists(dupPath))
            {
                string basePath = Path.Combine(studyStorage.GetStudyPath(), sop.SeriesInstanceUid);
                basePath = Path.Combine(basePath, sop.SopInstanceUid);
                if (sop.Extension != null)
                {
                    dupPath = basePath + "." + sop.Extension;
                }
                else
                {
                    dupPath = basePath + ".dcm";
                }
            }

            #endregion

            return(dupPath);
        }
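
        // Worked example of the layouts resolved above (illustrative placeholders only):
        //     RelativePath populated:  <DuplicateGroupPath>\<RelativePath>
        //     RelativePath empty:      <DuplicateGroupPath>\<StudyInstanceUid>\<SopInstanceUid>.<Extension>
        //     legacy data (no RelativePath, file absent):
        //                              <StudyPath>\<SeriesInstanceUid>\<SopInstanceUid>.<Extension or "dcm">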
Example #4
        /// <summary>
        /// Create Duplicate SIQ Entry
        /// </summary>
        /// <param name="file"></param>
        /// <param name="location"></param>
        /// <param name="sourcePath"></param>
        /// <param name="queue"></param>
        /// <param name="uid"></param>
        /// <param name="data"></param>
        public static void CreateDuplicateSIQEntry(DicomFile file, StudyStorageLocation location, string sourcePath,
                                                   WorkQueue queue, WorkQueueUid uid, StudyProcessWorkQueueData data)
        {
            Platform.Log(LogLevel.Info, "Creating Work Queue Entry for duplicate...");
            String uidGroup = queue.GroupID ?? queue.GetKey().Key.ToString();

            using (var commandProcessor = new ServerCommandProcessor("Insert Work Queue entry for duplicate"))
            {
                commandProcessor.AddCommand(new FileDeleteCommand(sourcePath, true));

                var sopProcessingContext     = new SopInstanceProcessorContext(commandProcessor, location, uidGroup);
                DicomProcessingResult result = Process(sopProcessingContext, file, data);
                if (!result.Successful)
                {
                    FailUid(uid, true);
                    return;
                }

                commandProcessor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!commandProcessor.Execute())
                {
                    Platform.Log(LogLevel.Error, "Unexpected error when creating duplicate study integrity queue entry: {0}",
                                 commandProcessor.FailureReason);
                    FailUid(uid, true);
                }
            }
        }
Example #5
        /// <summary>
        /// Gets the path to the DICOM file referenced by the <see cref="WorkQueueUid"/>. The file resides in either the study folder or the reconcile folder.
        /// </summary>
        /// <param name="uid"></param>
        /// <returns></returns>
        protected string GetFileStoredPath(WorkQueueUid uid)
        {
            Platform.CheckForNullReference(uid, "uid");

            if (!uid.Duplicate)
            {
                return(StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid));
            }
            else
            {
                // For StudyProcess (which this processor is designed to clean up), duplicates are stored in the Reconcile folder (see SopInstanceImporter)
                string path = Path.Combine(StorageLocation.FilesystemPath, StorageLocation.PartitionFolder);
                path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
                path = Path.Combine(path, uid.GroupID);

                if (string.IsNullOrEmpty(uid.RelativePath))
                {
                    path = Path.Combine(path, StorageLocation.StudyInstanceUid);
                    var extension = uid.Extension ?? ServerPlatform.DicomFileExtension;
                    path = Path.Combine(path, uid.SopInstanceUid + "." + extension);
                }
                else
                {
                    path = Path.Combine(path, uid.RelativePath);
                }

                return(path);
            }
        }
Example #6
        private ProcessDuplicateResult OverwriteDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

            var result = new ProcessDuplicateResult();

            result.ActionTaken = DuplicateProcessResultAction.Accept;

            using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
            {
                var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
                processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

                // Ideally we don't need to insert the instance into the database since it's a duplicate.
                // However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
                // For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
                // But it will be corrected at the end of the processing when the study verification is executed.
                processor.AddCommand(new InsertInstanceCommand(dupFile, Context.StorageLocation));

                // Update the StudyStream object
                processor.AddCommand(new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!processor.Execute())
                {
                    // cause the item to fail
                    throw new Exception("Error occurred when trying to overwrite duplicate in the filesystem.", processor.FailureException);
                }
            }

            return(result);
        }
Example #7
        protected string GetReconcileUidPath(WorkQueueUid sop)
        {
            // In 2.0: Path = \\Filesystem Path\Reconcile\GroupID\StudyInstanceUid\*.dcm
            WorkQueue workqueueItem = Context.WorkQueueItem;

            if (!String.IsNullOrEmpty(workqueueItem.GroupID))
            {
                StudyStorageLocation storageLocation = Context.WorkQueueItemStudyStorage;
                string path = Path.Combine(storageLocation.FilesystemPath, storageLocation.PartitionFolder);
                path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
                path = Path.Combine(path, workqueueItem.GroupID);
                path = Path.Combine(path, storageLocation.StudyInstanceUid);
                path = Path.Combine(path, sop.SopInstanceUid + ServerPlatform.DicomFileExtension);
                return(path);
            }
            else
            {
                #region BACKWARD-COMPATIBLE CODE
                // 1.5 SP1, RelativePath is NOT populated for Reconcile Study entry
                // Path = \\Filesystem Path\Reconcile\GUID\*.dcm
                // where \\Filesystem Path\Reconcile\GUID = Context.ReconcileWorkQueueData.StoragePath
                if (String.IsNullOrEmpty(sop.RelativePath))
                {
                    string path = Context.ReconcileWorkQueueData.StoragePath;
                    path = Path.Combine(path, sop.SopInstanceUid + ServerPlatform.DicomFileExtension);
                    return(path);
                }

                // will this ever happen?
                return(Path.Combine(Context.ReconcileWorkQueueData.StoragePath, sop.RelativePath));

                #endregion
            }
        }
Example #8
        private void AddDuplicateToStudy(DicomFile duplicateDicomFile, WorkQueueUid uid, ProcessDuplicateAction action)
        {
            var context = new StudyProcessorContext(StorageLocation, WorkQueueItem);
            var sopInstanceProcessor = new SopInstanceProcessor(context)
            {
                EnforceNameRules = true
            };
            string group = uid.GroupID ?? ServerHelper.GetUidGroup(duplicateDicomFile, ServerPartition, WorkQueueItem.InsertTime);

            StudyXml studyXml = StorageLocation.LoadStudyXml();
            int      originalInstanceCount = studyXml.NumberOfStudyRelatedInstances;

            bool compare = action != ProcessDuplicateAction.OverwriteAsIs;
            // NOTE: "compare" has no effect for OverwriteUseExisting or OverwriteUseDuplicate
            // because in both cases, the study and the duplicates are modified to be the same.
            ProcessingResult result = sopInstanceProcessor.ProcessFile(group, duplicateDicomFile, studyXml, compare, true, uid, duplicateDicomFile.Filename, SopInstanceProcessorSopType.UpdatedSop);

            if (result.Status == ProcessingStatus.Reconciled)
            {
                throw new ApplicationException("Unexpected status of Reconciled image in duplicate handling!");
            }

            Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCount + 1);
            Debug.Assert(File.Exists(StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid)));
        }
Example #9
        public static void FailUid(WorkQueueUid sop, bool retry)
        {
            using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
                IWorkQueueUidEntityBroker uidUpdateBroker = updateContext.GetBroker<IWorkQueueUidEntityBroker>();
                WorkQueueUidUpdateColumns columns         = new WorkQueueUidUpdateColumns();
                if (!retry)
                {
                    columns.Failed = true;
                }
                else
                {
                    if (sop.FailureCount >= ImageServerCommonConfiguration.WorkQueueMaxFailureCount)
                    {
                        columns.Failed = true;
                    }
                    else
                    {
                        columns.FailureCount = ++sop.FailureCount;
                    }
                }
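
                // In short: retry == false marks the uid Failed immediately, while retry == true increments
                // FailureCount and only marks the uid Failed once the count reaches
                // ImageServerCommonConfiguration.WorkQueueMaxFailureCount.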

                uidUpdateBroker.Update(sop.GetKey(), columns);
                updateContext.Commit();
            }
        }
Example #10
        private void ProcessUid(WorkQueueUid uid)
        {
            switch (_processDuplicateEntry.QueueData.Action)
            {
            case ProcessDuplicateAction.Delete:
                DeleteDuplicate(uid);
                break;

            case ProcessDuplicateAction.OverwriteUseDuplicates:
                OverwriteExistingInstance(uid, ProcessDuplicateAction.OverwriteUseDuplicates);
                break;

            case ProcessDuplicateAction.OverwriteUseExisting:
                OverwriteExistingInstance(uid, ProcessDuplicateAction.OverwriteUseExisting);
                break;

            case ProcessDuplicateAction.OverwriteAsIs:
                OverwriteExistingInstance(uid, ProcessDuplicateAction.OverwriteAsIs);
                break;

            default:
                throw new NotSupportedException(
                          String.Format("Not supported action: {0}", _processDuplicateEntry.QueueData.Action));
            }
        }
Example #11
        private string GetUidPath(WorkQueueUid sop)
        {
            string imagePath = Path.Combine(_reconcileQueueData.StoragePath, sop.SopInstanceUid + ServerPlatform.DicomFileExtension);

            Debug.Assert(String.IsNullOrEmpty(imagePath) == false);

            return(imagePath);
        }
Example #12
        private DicomFile LoadDuplicateDicomFile(WorkQueueUid uid, bool skipPixelData)
        {
            FileInfo duplicateFile = GetDuplicateSopFile(uid);

            Platform.CheckTrue(duplicateFile.Exists, String.Format("Duplicate SOP {0} doesn't exist at {1}", uid.SopInstanceUid, duplicateFile.FullName));
            DicomFile file = new DicomFile(duplicateFile.FullName);

            file.Load(skipPixelData ? DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default : DicomReadOptions.Default);
            return(file);
        }
Example #13
        private void OverwriteExistingInstance(WorkQueueUid uid, ProcessDuplicateAction action)
        {
            if (ExistsInStudy(uid))
            {
                // remove the existing image and update the count
                RemoveExistingImage(uid);
            }

            DicomFile duplicateDicomFile = LoadDuplicateDicomFile(uid, false);

            PreprocessDuplicate(duplicateDicomFile, action);
            AddDuplicateToStudy(duplicateDicomFile, uid, action);
        }
Example #14
        void SaveDuplicateReport(WorkQueueUid uid, string sourceFile, string destinationFile, DicomFile dupFile, StudyXml studyXml)
        {
            using (var processor = new ServerCommandProcessor("Save duplicate report"))
            {
                processor.AddCommand(new RenameFileCommand(sourceFile, destinationFile, false));

                // Update the StudyStream object
                processor.AddCommand(new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                processor.Execute();
            }
        }
Example #15
        private void DeleteDuplicate(WorkQueueUid uid)
        {
            using (ServerCommandProcessor processor = new ServerCommandProcessor("Delete Received Duplicate"))
            {
                FileInfo duplicateFile = GetDuplicateSopFile(uid);
                processor.AddCommand(new FileDeleteCommand(duplicateFile.FullName, true));
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
                if (!processor.Execute())
                {
                    throw new ApplicationException(processor.FailureReason, processor.FailureException);
                }
                Platform.Log(ServerPlatform.InstanceLogLevel, "Discard duplicate SOP {0} in {1}", uid.SopInstanceUid, duplicateFile.FullName);
            }
        }
Example #16
        private ProcessDuplicateResult OverwriteDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

            var result = new ProcessDuplicateResult();

            result.ActionTaken = DuplicateProcessResultAction.Accept;

            using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
            {
                var context = new ServerActionContext(dupFile, Context.StorageLocation.FilesystemKey, Context.StorageLocation.ServerPartition, Context.StorageLocation.Key, processor);

                var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
                processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

                // Set the filename so that the FileSize calculation in InsertStudyXmlCommand works
                dupFile.Filename = destination;

                // Update the StudyStream object
                var insertStudyXmlCommand = new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation);
                processor.AddCommand(insertStudyXmlCommand);

                // Ideally we don't need to insert the instance into the database since it's a duplicate.
                // However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
                // For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
                // But it will be corrected at the end of the processing when the study verification is executed.
                processor.AddCommand(new InsertInstanceCommand(dupFile, Context.StorageLocation));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                processor.AddCommand(new ApplySopRulesCommand(context, Context.SopProcessedRulesEngine));

                if (!processor.Execute())
                {
                    EventManager.FireEvent(this, new FailedUpdateSopEventArgs {
                        File = dupFile,
                        ServerPartitionEntry = Context.StorageLocation.ServerPartition,
                        WorkQueueUidEntry = uid,
                        WorkQueueEntry = WorkQueueItem,
                        FileLength = (ulong)insertStudyXmlCommand.FileSize,
                        FailureMessage = processor.FailureReason
                    });

                    // cause the item to fail
                    throw new Exception("Error occurred when trying to overwrite duplicate in the filesystem.", processor.FailureException);
                }

                EventManager.FireEvent(this, new UpdateSopEventArgs {
                    File = dupFile,
                    ServerPartitionEntry = Context.StorageLocation.ServerPartition,
                    WorkQueueUidEntry = uid,
                    WorkQueueEntry = WorkQueueItem,
                    FileLength = (ulong)insertStudyXmlCommand.FileSize
                });
            }

            return(result);
        }
Example #17
        private bool ExistsInStudy(WorkQueueUid uid)
        {
            String path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (File.Exists(path))
            {
                return(true);
            }

            // check the study xml
            StudyXml studyXml = StorageLocation.LoadStudyXml();

            return(studyXml[uid.SeriesInstanceUid] != null &&
                   studyXml[uid.SeriesInstanceUid][uid.SopInstanceUid] != null);
        }
Example #18
        private FileInfo GetDuplicateSopFile(WorkQueueUid uid)
        {
            string path = DuplicateFolder;

            if (string.IsNullOrEmpty(uid.RelativePath))
            {
                path = Path.Combine(path, StorageLocation.StudyInstanceUid);
                path = Path.Combine(path, uid.SopInstanceUid + "." + uid.Extension);
            }
            else
            {
                path = Path.Combine(path, uid.RelativePath);
            }

            return(new FileInfo(path));
        }
Example #19
        /// <summary>
        /// Gets the base directory for the <see cref="WorkQueueUid"/> where the corresponding file is stored (see
        /// <see cref="GetFileStoredPath"/>)
        /// </summary>
        /// <param name="sop"></param>
        /// <returns></returns>
        protected string GetBaseDirectory(WorkQueueUid sop)
        {
            if (!sop.Duplicate)
            {
                return(StorageLocation.GetStudyPath());
            }
            else
            {
                string path = Path.Combine(StorageLocation.FilesystemPath, StorageLocation.PartitionFolder);
                path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
                path = Path.Combine(path, sop.GroupID);

                Debug.Assert(GetFileStoredPath(sop).IndexOf(path) == 0, "Should be consistent with what GetFileStoredPath() returns");
                return(path);
            }
        }
Example #20
        /// <summary>
        /// Called after the specified <see cref="WorkQueueUid"/> has been processed
        /// </summary>
        /// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
        /// <param name="uid">The <see cref="WorkQueueUid"/> being processed</param>
        protected virtual void OnProcessUidEnd(Model.WorkQueue item, WorkQueueUid uid)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(uid, "uid");

            if (uid.Duplicate)
            {
                String dupPath = ServerPlatform.GetDuplicateUidPath(StorageLocation, uid);
                // Delete the container if it's empty
                var f = new FileInfo(dupPath);

                if (f.Directory != null && DirectoryUtility.DeleteIfEmpty(f.Directory.FullName))
                {
                    DirectoryUtility.DeleteIfEmpty(ServerPlatform.GetDuplicateGroupPath(StorageLocation, uid));
                }
            }
        }
Example #21
        private static void RemoveWorkQueueUid(WorkQueueUid uid, string fileToDelete)
        {
            using (var processor = new ServerCommandProcessor("Remove Work Queue Uid"))
            {
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
                if (String.IsNullOrEmpty(fileToDelete) == false)
                {
                    processor.AddCommand(new FileDeleteCommand(fileToDelete, true));
                }

                if (!processor.Execute())
                {
                    String error = String.Format("Unable to delete Work Queue Uid {0}: {1}", uid.Key, processor.FailureReason);
                    Platform.Log(LogLevel.Error, error);
                    throw new ApplicationException(error, processor.FailureException);
                }
            }
        }
Example #22
        private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            var result = new ProcessDuplicateResult();

            var data = uid.SerializeWorkQueueUidData;

            var basePath = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (!File.Exists(basePath))
            {
                // NOTE: This is a special case. The file which caused the DICOM service to think this SOP is a duplicate
                // no longer exists in the study folder. Perhaps it was moved to another folder during auto reconciliation.
                // We have nothing to compare against, so just put it in the SIQ queue.
                CreateDuplicateSIQEntry(uid, dupFile, null);
                result.ActionTaken = DuplicateProcessResultAction.Reconcile;
            }
            else
            {
                var duplicateEnum = data.DuplicateProcessing ?? DuplicateProcessingEnum.Compare;

                switch (duplicateEnum)
                {
                case DuplicateProcessingEnum.OverwriteSop:
                    // Note: there's actually no difference between OverwriteDuplicate and OverwriteAndUpdateDuplicate, but it was decided to leave them as is.
                    return(OverwriteDuplicate(dupFile, uid, studyXml));

                case DuplicateProcessingEnum.OverwriteSopAndUpdateDatabase:
                    return(OverwriteAndUpdateDuplicate(dupFile, uid, studyXml));

                case DuplicateProcessingEnum.OverwriteReport:
                    var file = new DicomFile(basePath);
                    return(ProcessDuplicateReport(dupFile, file, uid, studyXml));

                case DuplicateProcessingEnum.Compare:
                    var baseFile = new DicomFile(basePath);
                    return(CompareDuplicates(dupFile, baseFile, uid));

                default:
                    throw new InvalidOperationException("");
                }
            }

            return(result);
        }
Example #23
        private FileInfo GetDuplicateSopFile(WorkQueueUid uid)
        {
            string path = Path.Combine(StorageLocation.FilesystemPath, StorageLocation.PartitionFolder);

            path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
            path = Path.Combine(path, WorkQueueItem.GroupID);

            if (string.IsNullOrEmpty(uid.RelativePath))
            {
                path = Path.Combine(path, StorageLocation.StudyInstanceUid);
                path = Path.Combine(path, uid.SopInstanceUid + "." + uid.Extension);
            }
            else
            {
                path = Path.Combine(path, uid.RelativePath);
            }

            return(new FileInfo(path));
        }
Example #24
        /// <summary>
        /// Gets the path of the specified <see cref="WorkQueueUid"/>.
        /// Note: only StudyProcess is currently supported. Other types of WQI will cause an InvalidOperationException.
        /// </summary>
        /// <param name="item"></param>
        /// <param name="uid"></param>
        /// <returns></returns>
        public static FilesystemDynamicPath GetWorkQueueUidPath(this WorkQueue item, WorkQueueUid uid)
        {
            // Check for invalid use of this method
            if (!uid.WorkQueueKey.Equals(item.Key))
            {
                throw new InvalidOperationException("uid.WorkQueueKey and item.Key do not match");
            }
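
            // Being an extension method, this reads naturally at the call site, e.g. (hypothetical variables):
            //     FilesystemDynamicPath uidPath = workQueueItem.GetWorkQueueUidPath(uid);
            // The returned path is relative to either the study folder or the reconcile folder, as indicated
            // by its PathType.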

            var studyStorage = item.StudyStorage;

            // Logic for determining WorkQueueUid path for StudyProcess WQI
            #region Logic for StudyProcess

            if (item.WorkQueueTypeEnum == WorkQueueTypeEnum.StudyProcess)
            {
                if (!uid.Duplicate)
                {
                    var path = Path.Combine(uid.SeriesInstanceUid, uid.SopInstanceUid + ServerPlatform.DicomFileExtension);
                    return(new FilesystemDynamicPath(path, FilesystemDynamicPath.PathType.RelativeToStudyFolder));
                }
                else
                {
                    // full path = \\FS\Partition\Reconcile\UidGroup\sopUID.ext
                    // relative path = UidGroup\sopUID.ext
                    var ext = string.IsNullOrEmpty(uid.Extension) ? ServerPlatform.DicomFileExtension : uid.Extension;

                    var path = uid.GroupID;
                    if (string.IsNullOrEmpty(uid.RelativePath))
                    {
                        path = Path.Combine(path, studyStorage.StudyInstanceUid);
                        path = Path.Combine(path, uid.SopInstanceUid + "." + ext);
                    }
                    else
                    {
                        path = Path.Combine(path, uid.RelativePath);
                    }
                    return(new FilesystemDynamicPath(path, FilesystemDynamicPath.PathType.RelativeToReconcileFolder));
                }
            }
            #endregion

            throw new InvalidOperationException(string.Format("GetWorkQueueUidPath should not be used for {0} WQI", item.WorkQueueTypeEnum));
        }
Example #25
        private void RemoveExistingImage(WorkQueueUid uid)
        {
            string path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (!File.Exists(path))
            {
                return;
            }

            StudyXml studyXml = StorageLocation.LoadStudyXml();
            var      file     = new DicomFile(path);

            file.Load(DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default); // no need to load pixel data since the file is being deleted

            #if DEBUG
            int originalInstanceCountInXml  = studyXml.NumberOfStudyRelatedInstances;
            int originalStudyInstanceCount  = Study.NumberOfStudyRelatedInstances;
            int originalSeriesInstanceCount = Study.Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances;
            #endif

            using (var processor = new ServerCommandProcessor("Delete Existing Image"))
            {
                var seriesInstanceUid = file.DataSet[DicomTags.SeriesInstanceUid].ToString();
                var sopInstanceUid    = file.DataSet[DicomTags.SopInstanceUid].ToString();

                processor.AddCommand(new FileDeleteCommand(path, true));
                processor.AddCommand(new RemoveInstanceFromStudyXmlCommand(StorageLocation, studyXml, seriesInstanceUid, sopInstanceUid));
                processor.AddCommand(new UpdateInstanceCountCommand(StorageLocation, seriesInstanceUid, sopInstanceUid));

                if (!processor.Execute())
                {
                    throw new ApplicationException(String.Format("Unable to remove existing image {0}", file.Filename), processor.FailureException);
                }
            }

            #if DEBUG
            Debug.Assert(!File.Exists(path));
            Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCountInXml - 1);
            Debug.Assert(Study.Load(Study.Key).NumberOfStudyRelatedInstances == originalStudyInstanceCount - 1);
            Debug.Assert(Study.Load(Study.Key).Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances == originalSeriesInstanceCount - 1);
            #endif
        }
Example #26
        /// <summary>
        /// Gets the commands to update the study
        /// </summary>
        /// <returns></returns>
        private List<BaseImageLevelUpdateCommand> BuildUpdateStudyCommandsFromDuplicate()
        {
            List<BaseImageLevelUpdateCommand> commands = new List<BaseImageLevelUpdateCommand>();

            if (WorkQueueUidList.Count > 0)
            {
                WorkQueueUid uid  = WorkQueueUidList[0];
                DicomFile    file = LoadDuplicateDicomFile(uid, true);
                ImageUpdateCommandBuilder commandBuilder = new ImageUpdateCommandBuilder();
                // Create a list of commands to update the existing study based on what's defined in StudyMatchingMap
                // The value will be taken from the content of the duplicate image.
                commands.AddRange(commandBuilder.BuildCommands<StudyMatchingMap>(
                                      file.DataSet,
                                      new[] {
                                          new ServerEntityAttributeProvider(StorageLocation.Study),
                                          new ServerEntityAttributeProvider(StorageLocation.Patient)
                                      }));
            }

            return(commands);
        }
Example #27
        void CreateDuplicateSIQEntry(WorkQueueUid uid, DicomFile file, List<DicomAttributeComparisonResult> differences)
        {
            Platform.Log(LogLevel.Info, "Duplicate SOP is different from existing copy. Creating duplicate SIQ entry. SOP: {0}", uid.SopInstanceUid);

            using (var processor = new ServerCommandProcessor("Create Duplicate SIQ Entry"))
            {
                var insertCommand = new InsertOrUpdateEntryCommand(
                    uid.GroupID, StorageLocation, file,
                    ServerPlatform.GetDuplicateGroupPath(StorageLocation, uid),
                    string.IsNullOrEmpty(uid.RelativePath)
                        ? Path.Combine(StorageLocation.StudyInstanceUid, uid.SopInstanceUid + "." + uid.Extension)
                        : uid.RelativePath,
                    differences);
                processor.AddCommand(insertCommand);

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                processor.Execute();
            }
        }
Example #28
        void SaveDuplicateReport(WorkQueueUid uid, string sourceFile, string destinationFile, DicomFile dupFile, StudyXml studyXml)
        {
            using (var processor = new ServerCommandProcessor("Save duplicate report"))
            {
                var sopContext = new ServerActionContext(dupFile, Context.StorageLocation.FilesystemKey, Context.StorageLocation.ServerPartition, Context.StorageLocation.Key, processor);

                processor.AddCommand(new RenameFileCommand(sourceFile, destinationFile, false));

                // Update the StudyStream object
                processor.AddCommand(new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation));

                // Update the series
                processor.AddCommand(new UpdateSeriesCommand(Context.StorageLocation, sopContext));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                processor.AddCommand(new ApplySopRulesCommand(sopContext, Context.SopProcessedRulesEngine));

                processor.Execute();
            }
        }
Example #29
        /// <summary>
        /// Process a specific DICOM file related to a <see cref="WorkQueue"/> request.
        /// </summary>
        /// <param name="queueUid"></param>
        /// <param name="stream">The <see cref="StudyXml"/> file to update with information from the file.</param>
        /// <param name="file">The file being processed.</param>
        /// <param name="compare">Indicates whether to compare the DICOM file against the study in the system.</param>
        protected virtual void ProcessFile(WorkQueueUid queueUid, DicomFile file, StudyXml stream, bool compare)
        {
            var processor = new SopInstanceProcessor(Context)
            {
                EnforceNameRules = true
            };

            var  fileInfo = new FileInfo(file.Filename);
            long fileSize = fileInfo.Length;

            processor.InstanceStats.FileLoadTime.Start();
            processor.InstanceStats.FileLoadTime.End();
            processor.InstanceStats.FileSize = (ulong)fileSize;
            string sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, "File:" + fileInfo.Name);

            processor.InstanceStats.Description = sopInstanceUid;

            string group = queueUid.GroupID ?? ServerHelper.GetUidGroup(file, ServerPartition, WorkQueueItem.InsertTime);

            ProcessingResult result = processor.ProcessFile(group, file, stream, compare, true, queueUid, null);

            if (result.Status == ProcessingStatus.Reconciled)
            {
                // The file has been saved elsewhere by the SopInstanceProcessor for reconciliation.
                // Note: the SopInstanceProcessor has already removed the WorkQueueUid, so we
                // only need to delete the file here.
                FileUtils.Delete(fileInfo.FullName);
            }

            Statistics.StudyInstanceUid = StorageLocation.StudyInstanceUid;
            if (String.IsNullOrEmpty(processor.Modality) == false)
            {
                Statistics.Modality = processor.Modality;
            }

            // Update the statistics
            Statistics.NumInstances++;
            Statistics.AddSubStats(processor.InstanceStats);
        }
Example #30
        /// <summary>
        /// Apply changes to the file prior to processing it.
        /// </summary>
        /// <param name="uid"></param>
        /// <param name="file"></param>
        protected virtual InstancePreProcessingResult PreProcessFile(WorkQueueUid uid, DicomFile file)
        {
            String contextID = uid.GroupID ?? String.Format("{0}_{1}",
                                                            String.IsNullOrEmpty(file.SourceApplicationEntityTitle) ? ServerPartition.AeTitle : file.SourceApplicationEntityTitle,
                                                            WorkQueueItem.InsertTime.ToString("yyyyMMddHHmmss"));

            var result = new InstancePreProcessingResult();

            var        patientNameRules = new PatientNameRules(Study);
            UpdateItem updateItem       = patientNameRules.Apply(file);

            result.Modified = updateItem != null;

            var autoBaseReconciler = new AutoReconciler(contextID, StorageLocation);
            InstancePreProcessingResult reconcileResult = autoBaseReconciler.Process(file);

            result.AutoReconciled = reconcileResult != null;
            result.Modified      |= reconcileResult != null;

            if (reconcileResult != null && reconcileResult.DiscardImage)
            {
                result.DiscardImage = true;
            }

            // If the StudyInstanceUid was modified, the file will be deleted by the caller.
            if (file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid))
            {
                if (result.Modified)
                {
                    file.Save();
                }
            }

            return(result);
        }
Example #31
        private void InsertInstance(DicomFile file, StudyXml stream, WorkQueueUid uid, string deleteFile)
        {
            using (ServerCommandProcessor processor = new ServerCommandProcessor("Processing WorkQueue DICOM file"))
            {
                EventsHelper.Fire(OnInsertingSop, this, new SopInsertingEventArgs {
                    Processor = processor
                });

                InsertInstanceCommand insertInstanceCommand = null;
                InsertStudyXmlCommand insertStudyXmlCommand = null;

                String patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, String.Empty);
                _modality = file.DataSet[DicomTags.Modality].GetString(0, String.Empty);

                if (_context.UpdateCommands.Count > 0)
                {
                    foreach (BaseImageLevelUpdateCommand command in _context.UpdateCommands)
                    {
                        command.File = file;
                        processor.AddCommand(command);
                    }
                }

                try
                {
                    // Create a context for applying actions from the rules engine
                    ServerActionContext context =
                        new ServerActionContext(file, _context.StorageLocation.FilesystemKey, _context.Partition, _context.StorageLocation.Key);
                    context.CommandProcessor = processor;

                    _context.SopCompressionRulesEngine.Execute(context);
                    String seriesUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
                    String sopUid    = file.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);
                    String finalDest = _context.StorageLocation.GetSopInstancePath(seriesUid, sopUid);

                    if (_context.UpdateCommands.Count > 0)
                    {
                        processor.AddCommand(new SaveDicomFileCommand(_context.StorageLocation, file, file.Filename != finalDest));
                    }
                    else if (file.Filename != finalDest || processor.CommandCount > 0)
                    {
                        // Have to be careful here about failing on exists vs. not failing on exists,
                        // because of the different use cases of the importer.
                        // Save the file into the study folder, or to its new location if it has been compressed.
                        processor.AddCommand(new SaveDicomFileCommand(finalDest, file, file.Filename != finalDest));
                    }
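
                    // (In both branches above, the final bool argument is true only when the file is not already
                    // at its final destination; presumably this drives the fail-on-exists behaviour referred to
                    // in the comment. This is inferred from these call sites, not confirmed against
                    // SaveDicomFileCommand itself.)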

                    // Update the StudyStream object
                    insertStudyXmlCommand = new InsertStudyXmlCommand(file, stream, _context.StorageLocation);
                    processor.AddCommand(insertStudyXmlCommand);

                    // Have the rules applied during the command processor, and add the objects.
                    processor.AddCommand(new ApplySopRulesCommand(context, _context.SopProcessedRulesEngine));

                    // If specified, delete the file
                    if (deleteFile != null)
                    {
                        processor.AddCommand(new FileDeleteCommand(deleteFile, true));
                    }

                    // Insert into the database, but only if it's not a duplicate, so the counts don't get off
                    insertInstanceCommand = new InsertInstanceCommand(file, _context.StorageLocation);
                    processor.AddCommand(insertInstanceCommand);

                    // Do a check if the StudyStatus value should be changed in the StorageLocation.  This
                    // should only occur if the object has been compressed in the previous steps.
                    processor.AddCommand(new UpdateStudyStatusCommand(_context.StorageLocation, file));

                    if (uid != null)
                    {
                        processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
                    }

                    // Do the actual processing
                    if (!processor.Execute())
                    {
                        Platform.Log(LogLevel.Error, "Failure processing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid);
                        Platform.Log(LogLevel.Error, "File that failed processing: {0}", file.Filename);
                        throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException);
                    }
                    Platform.Log(ServerPlatform.InstanceLogLevel, "Processed SOP: {0} for Patient {1}", file.MediaStorageSopInstanceUid, patientsName);
                }
                catch (Exception e)
                {
                    Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}.  Rolling back operation.",
                                 processor.Description);
                    processor.Rollback();
                    throw new ApplicationException("Unexpected exception when processing file.", e);
                }
                finally
                {
                    if (insertInstanceCommand != null && insertInstanceCommand.Statistics.IsSet)
                    {
                        _instanceStats.InsertDBTime.Add(insertInstanceCommand.Statistics);
                    }
                    if (insertStudyXmlCommand != null && insertStudyXmlCommand.Statistics.IsSet)
                    {
                        _instanceStats.InsertStreamTime.Add(insertStudyXmlCommand.Statistics);
                    }
                }
            }
        }