private void PreprocessDuplicate(DicomFile duplicateDicomFile, ProcessDuplicateAction action)
        {
            _patientNameRules.Apply(duplicateDicomFile);

            if (action == ProcessDuplicateAction.OverwriteUseExisting)
            {
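                // Build an Original Attributes Sequence item so the coercion below is
                // auditable: it records the modifying system, the modification time, the
                // reason ("COERCE"), and the AE title the duplicate was received from.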
                var sq = new OriginalAttributesSequence
                {
                    ModifiedAttributesSequence        = new DicomSequenceItem(),
                    ModifyingSystem                   = ProductInformation.Component,
                    ReasonForTheAttributeModification = "COERCE",
                    AttributeModificationDatetime     = Platform.Time,
                    SourceOfPreviousValues            = duplicateDicomFile.SourceApplicationEntityTitle
                };

                foreach (BaseImageLevelUpdateCommand command in _duplicateUpdateCommands)
                {
                    if (!command.Apply(duplicateDicomFile, sq))
                    {
                        throw new ApplicationException(String.Format("Unable to update the duplicate sop. Command={0}", command));
                    }
                }

                var sqAttrib = duplicateDicomFile.DataSet[DicomTags.OriginalAttributesSequence] as DicomAttributeSQ;
                if (sqAttrib != null)
                {
                    sqAttrib.AddSequenceItem(sq.DicomSequenceItem);
                }
            }
        }
        private void OverwriteExistingInstance(WorkQueueUid uid, ProcessDuplicateAction action)
        {
            if (ExistsInStudy(uid))
            {
                // remove the existing image and update the count
                RemoveExistingImage(uid);
            }

            DicomFile duplicateDicomFile = LoadDuplicateDicomFile(uid, false);

            PreprocessDuplicate(duplicateDicomFile, action);
            AddDuplicateToStudy(duplicateDicomFile, uid, action);
        }
Example #3
        private void PreprocessDuplicate(DicomFile duplicateDicomFile, ProcessDuplicateAction action)
        {
            _patientNameRules.Apply(duplicateDicomFile);

            if (action == ProcessDuplicateAction.OverwriteUseExisting)
            {
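                // Unlike the variant above, this one applies the update commands directly,
                // without recording an Original Attributes Sequence audit item.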
                foreach (BaseImageLevelUpdateCommand command in _duplicateUpdateCommands)
                {
                    if (!command.Apply(duplicateDicomFile))
                    {
                        throw new ApplicationException(String.Format("Unable to update the duplicate sop. Command={0}", command));
                    }
                }
            }
        }
Example #4
        protected void OKButton_Click(object sender, ImageClickEventArgs e)
        {
            try
            {
                var itemKey    = ViewState["QueueItem"] as ServerEntityKey;
                var controller = new DuplicateSopEntryController();
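                // Map the checked radio button to the corresponding duplicate-processing
                // action; OverwriteAsIs is the default when nothing else is selected.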
                ProcessDuplicateAction action = ProcessDuplicateAction.OverwriteAsIs;
                if (UseExistingSopRadioButton.Checked)
                {
                    action = ProcessDuplicateAction.OverwriteUseExisting;
                }
                else if (UseDuplicateRadioButton.Checked)
                {
                    action = ProcessDuplicateAction.OverwriteUseDuplicates;
                }
                else if (DeleteDuplicateRadioButton.Checked)
                {
                    action = ProcessDuplicateAction.Delete;
                }
                else if (ReplaceAsIsRadioButton.Checked)
                {
                    action = ProcessDuplicateAction.OverwriteAsIs;
                }

                controller.Process(itemKey, action);
            }
            catch (Exception ex)
            {
                MessageBox.Message     = String.Format(ErrorMessages.ActionNotAllowedAtThisTime, ex.Message);
                MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
                MessageBox.Show();
            }

            //((Default) Page).UpdateUI();
            Close();
        }
        /// <summary>
        /// Inserts a work queue entry to process the duplicates.
        /// </summary>
        /// <param name="entryKey"><see cref="ServerEntityKey"/> of the <see cref="StudyIntegrityQueue"/> entry that has <see cref="StudyIntegrityReasonEnum"/> equal to <see cref="StudyIntegrityReasonEnum.Duplicate"/>.</param>
        /// <param name="action">The <see cref="ProcessDuplicateAction"/> to take on the duplicates.</param>
        public void Process(ServerEntityKey entryKey, ProcessDuplicateAction action)
        {
            DuplicateSopReceivedQueue entry = DuplicateSopReceivedQueue.Load(HttpContextData.Current.ReadContext, entryKey);
            Platform.CheckTrue(entry.StudyIntegrityReasonEnum == StudyIntegrityReasonEnum.Duplicate, "Invalid type of entry");

            IList<StudyIntegrityQueueUid> uids = LoadDuplicateSopUid(entry);

            using (IUpdateContext context = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
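                // Schedule the work queue entry, move the duplicate UIDs across, and
                // remove the study integrity queue records, committing once at the end.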
                ProcessDuplicateQueueEntryQueueData data = new ProcessDuplicateQueueEntryQueueData
                {
                    Action = action,
                    DuplicateSopFolder = entry.GetFolderPath(context),
                    UserName = ServerHelper.CurrentUserName
                };

                LockStudyParameters lockParms = new LockStudyParameters
                {
                    QueueStudyStateEnum = QueueStudyStateEnum.ReconcileScheduled,
                    StudyStorageKey = entry.StudyStorageKey
                };

                // Lock the study for reconciliation before scheduling the work queue entry.
                ILockStudy lockBroker = context.GetBroker<ILockStudy>();
                lockBroker.Execute(lockParms);
                if (!lockParms.Successful)
                {
                    throw new ApplicationException(lockParms.FailureReason);
                }

                IWorkQueueProcessDuplicateSopBroker broker = context.GetBroker<IWorkQueueProcessDuplicateSopBroker>();
                WorkQueueProcessDuplicateSopUpdateColumns columns = new WorkQueueProcessDuplicateSopUpdateColumns
                {
                    Data = XmlUtils.SerializeAsXmlDoc(data),
                    GroupID = entry.GroupID,
                    ScheduledTime = Platform.Time,
                    ExpirationTime = Platform.Time.Add(TimeSpan.FromMinutes(15)),
                    ServerPartitionKey = entry.ServerPartitionKey,
                    WorkQueuePriorityEnum = WorkQueuePriorityEnum.Medium,
                    StudyStorageKey = entry.StudyStorageKey,
                    WorkQueueStatusEnum = WorkQueueStatusEnum.Pending
                };

                WorkQueueProcessDuplicateSop processDuplicateWorkQueueEntry = broker.Insert(columns);

                IWorkQueueUidEntityBroker workQueueUidBroker = context.GetBroker<IWorkQueueUidEntityBroker>();
                IStudyIntegrityQueueUidEntityBroker duplicateUidBroker = context.GetBroker<IStudyIntegrityQueueUidEntityBroker>();
                foreach (StudyIntegrityQueueUid uid in uids)
                {
                    WorkQueueUidUpdateColumns uidColumns = new WorkQueueUidUpdateColumns
                    {
                        Duplicate = true,
                        Extension = ServerPlatform.DuplicateFileExtension,
                        SeriesInstanceUid = uid.SeriesInstanceUid,
                        SopInstanceUid = uid.SopInstanceUid,
                        RelativePath = uid.RelativePath,
                        WorkQueueKey = processDuplicateWorkQueueEntry.GetKey()
                    };

                    workQueueUidBroker.Insert(uidColumns);

                    duplicateUidBroker.Delete(uid.GetKey());
                }

                IDuplicateSopEntryEntityBroker duplicateEntryBroker = context.GetBroker<IDuplicateSopEntryEntityBroker>();
                duplicateEntryBroker.Delete(entry.GetKey());

                context.Commit();
            }
        }
        private void AddDuplicateToStudy(DicomFile duplicateDicomFile, WorkQueueUid uid, ProcessDuplicateAction action)
        {
            var context = new StudyProcessorContext(StorageLocation, WorkQueueItem);
            var sopInstanceProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };
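            // Reuse the group ID recorded with the queued UID, or derive one from the
            // duplicate file, partition, and insert time if none was set.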
            string group = uid.GroupID ?? ServerHelper.GetUidGroup(duplicateDicomFile, ServerPartition, WorkQueueItem.InsertTime);

            StudyXml studyXml = StorageLocation.LoadStudyXml();
            int originalInstanceCount = studyXml.NumberOfStudyRelatedInstances;

            bool compare = action != ProcessDuplicateAction.OverwriteAsIs;
            // NOTE: "compare" has no effect for OverwriteUseExisting or OverwriteUseDuplicate
            // because in both cases, the study and the duplicates are modified to be the same.
            ProcessingResult result = sopInstanceProcessor.ProcessFile(group, duplicateDicomFile, studyXml, compare, true, uid, duplicateDicomFile.Filename, SopInstanceProcessorSopType.UpdatedSop);
            if (result.Status == ProcessingStatus.Reconciled)
            {
                throw new ApplicationException("Unexpected status of Reconciled image in duplicate handling!");
            }

            Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCount + 1);
            Debug.Assert(File.Exists(StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid)));
        }