        private List<BaseImageLevelUpdateCommand> BuildUpdateCommandList()
        {
            List<BaseImageLevelUpdateCommand> updateCommandList = new List<BaseImageLevelUpdateCommand>();

            ImageUpdateCommandBuilder builder = new ImageUpdateCommandBuilder();

            updateCommandList.AddRange(builder.BuildCommands<StudyMatchingMap>(_destinationStudyStorage));

            return updateCommandList;
        }
        private void UpdateStudyOrDuplicates()
        {
            // StorageLocation object must be reloaded if we are overwriting the study
            // with info in the duplicates.
            bool needReload = false;

            switch (_processDuplicateEntry.QueueData.Action)
            {
            case ProcessDuplicateAction.OverwriteUseDuplicates:

                if (_processDuplicateEntry.QueueData.State.ExistingStudyUpdated)
                {
                    Platform.Log(LogLevel.Info, "Existing Study has been updated before");
                }
                else
                {
                    Platform.Log(LogLevel.Info, "Update Existing Study w/ Duplicate Info");
                    _studyUpdateCommands = BuildUpdateStudyCommandsFromDuplicate();
                    using (ServerCommandProcessor processor = new ServerCommandProcessor("Update Existing Study w/ Duplicate Info"))
                    {
                        processor.AddCommand(new UpdateStudyCommand(ServerPartition, StorageLocation, _studyUpdateCommands, ServerRuleApplyTimeEnum.SopProcessed, WorkQueueItem));
                        if (!processor.Execute())
                        {
                            throw new ApplicationException(processor.FailureReason, processor.FailureException);
                        }

                        needReload = true;
                        _processDuplicateEntry.QueueData.State.ExistingStudyUpdated = true;
                    }
                }

                break;

            case ProcessDuplicateAction.OverwriteUseExisting:
                // Build commands that will update the duplicate images with the existing
                // study's values for the attributes defined in StudyMatchingMap.
                ImageUpdateCommandBuilder commandBuilder = new ImageUpdateCommandBuilder();
                _duplicateUpdateCommands = new List<BaseImageLevelUpdateCommand>();
                _duplicateUpdateCommands.AddRange(commandBuilder.BuildCommands<StudyMatchingMap>(StorageLocation));
                PrintCommands(_duplicateUpdateCommands);
                break;
            }

            if (needReload)
            {
                StudyStorageLocation updatedStorageLocation;

                // NOTE: Make sure we are loading the storage location from the database instead of the cache.
                if (!FilesystemMonitor.Instance.GetWritableStudyStorageLocation(WorkQueueItem.StudyStorageKey, out updatedStorageLocation))
                {
                    // This is odd: the study was just updated, yet its storage location is no longer writable.
                    throw new ApplicationException("Filesystem is not writable");
                }
                StorageLocation = updatedStorageLocation;
            }
        }
        /// <summary>
        /// Gets the commands to update the study.
        /// </summary>
        /// <returns></returns>
        private List<BaseImageLevelUpdateCommand> BuildUpdateStudyCommandsFromDuplicate()
        {
            List<BaseImageLevelUpdateCommand> commands = new List<BaseImageLevelUpdateCommand>();

            if (WorkQueueUidList.Count > 0)
            {
                WorkQueueUid uid = WorkQueueUidList[0];
                DicomFile file = LoadDuplicateDicomFile(uid, true);
                ImageUpdateCommandBuilder commandBuilder = new ImageUpdateCommandBuilder();

                // Create a list of commands to update the existing study based on what's defined in StudyMatchingMap.
                // The value will be taken from the content of the duplicate image.
                commands.AddRange(commandBuilder.BuildCommands<StudyMatchingMap>(file.DataSet,
                    new[]
                    {
                        new ServerEntityAttributeProvider(StorageLocation.Study),
                        new ServerEntityAttributeProvider(StorageLocation.Patient)
                    }));
            }

            return commands;
        }
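        // --- Not from the original source: a minimal usage sketch. ---
        // It illustrates one way the commands returned by BuildUpdateCommandList() could be
        // applied to an incoming SOP, reusing the UpdateImage helper that appears in
        // MergeImage() below. ApplyDestinationStudyValues and incomingFile are hypothetical
        // names; _destinationStudyStorage is the field referenced by BuildUpdateCommandList().
        private void ApplyDestinationStudyValues(DicomFile incomingFile)
        {
            // Each BaseImageLevelUpdateCommand rewrites an attribute of the file so that the
            // SOP matches the destination study described by _destinationStudyStorage.
            List<BaseImageLevelUpdateCommand> commands = BuildUpdateCommandList();
            UpdateImage(incomingFile, commands);
        }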
        private AutoReconcilerResult MergeImage(StudyReconcileAction action, DicomFile file, StudyHistory lastHistory)
        {
            string originalSeriesUid = file.DataSet[DicomTags.SeriesInstanceUid].ToString();
            string originalSopUid = file.DataSet[DicomTags.SopInstanceUid].ToString();

            AutoReconcilerResult preProcessingResult = null;
            StudyStorageLocation destStudy;
            UidMapper uidMapper = null;

            if (lastHistory.DestStudyStorageKey != null)
            {
                StudyStorage destinationStudy = StudyStorage.Load(lastHistory.DestStudyStorageKey);

                // Load the destination. An exception will be thrown if any issues are encountered.
                FilesystemMonitor.Instance.GetWritableStudyStorageLocation(destinationStudy.ServerPartitionKey,
                                                                           destinationStudy.StudyInstanceUid,
                                                                           StudyRestore.True, StudyCache.True,
                                                                           out destStudy);

                EnsureStudyCanBeUpdated(destStudy);

                bool belongsToAnotherStudy = !destStudy.Equals(StorageLocation);

                ImageUpdateCommandBuilder commandBuilder = new ImageUpdateCommandBuilder();
                IList<BaseImageLevelUpdateCommand> commands = commandBuilder.BuildCommands<StudyMatchingMap>(destStudy);
                if (belongsToAnotherStudy)
                {
                    Platform.Log(LogLevel.Info, "AUTO-RECONCILE: Move SOP {0} to Study {1}, A#: {2}, Patient {3}", originalSopUid, destStudy.StudyInstanceUid, destStudy.Study.AccessionNumber, destStudy.Study.PatientsName);

                    // Load the UID map, either from cache or from disk
                    if (!_uidMapCache.TryGetValue(destStudy.Key, out uidMapper))
                    {
                        UidMapXml mapXml = new UidMapXml();
                        mapXml.Load(destStudy);
                        uidMapper = new UidMapper(mapXml);

                        _uidMapCache.Add(destStudy.Key, uidMapper);
                    }

                    try
                    {
                        commands.Add(GetUidMappingCommand(StorageLocation, destStudy, uidMapper, originalSopUid, originalSeriesUid));
                    }
                    catch (InstanceAlreadyExistsException ex)
                    {
                        Platform.Log(LogLevel.Info, "An instance already exists with the SOP Instance Uid {0}", ex.SopInstanceUid);
                        preProcessingResult = new AutoReconcilerResult(StudyReconcileAction.Discard) { DiscardImage = true };

                        return preProcessingResult;
                    }
                }

                preProcessingResult = new AutoReconcilerResult(action) { Changes = GetUpdateList(file, commands) };

                UpdateImage(file, commands);

                // The updated UID map must be saved before the file is imported into the destination study.
                if (uidMapper != null && uidMapper.Dirty)
                {
                    UpdateUidMap(destStudy, uidMapper);
                }

                if (belongsToAnotherStudy)
                {
                    SopInstanceImporterContext importContext = new SopInstanceImporterContext(_contextID, file.SourceApplicationEntityTitle, destStudy.ServerPartition);
                    SopInstanceImporter importer = new SopInstanceImporter(importContext);
                    DicomProcessingResult result = importer.Import(file);

                    if (!result.Successful)
                    {
                        throw new ApplicationException(result.ErrorMessage);
                    }
                }
            }
            return preProcessingResult;
        }
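        // --- Not from the original source: a minimal caller sketch. ---
        // It shows how the AutoReconcilerResult returned by MergeImage() might be consumed.
        // MergeImage, DiscardImage and Platform.Log are taken from the code above; the names
        // MergeAndProcess and ProcessFile are hypothetical, for illustration only.
        private void MergeAndProcess(StudyReconcileAction action, DicomFile incomingFile, StudyHistory lastHistory)
        {
            AutoReconcilerResult result = MergeImage(action, incomingFile, lastHistory);

            if (result != null && result.DiscardImage)
            {
                // MergeImage found an instance with the same SOP Instance UID already in the
                // destination study, so the incoming file is dropped.
                Platform.Log(LogLevel.Info, "Duplicate SOP discarded during auto-reconciliation");
                return;
            }

            // Otherwise the file has been updated in place (and, when it belongs to another
            // study, imported into that study); continue with normal processing.
            ProcessFile(incomingFile); // hypothetical downstream step
        }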