/// <summary>
/// Validates the state of the study.
/// </summary>
/// <param name="context">Name of the application</param>
/// <param name="studyStorage">The study to validate</param>
/// <param name="modes">Specifying what validation to execute</param>
public void ValidateStudyState(String context, StudyStorageLocation studyStorage, StudyIntegrityValidationModes modes)
{
    Platform.CheckForNullReference(studyStorage, "studyStorage");

    // No validation requested; nothing to do.
    if (modes == StudyIntegrityValidationModes.None)
        return;

    using (ServerExecutionContext scope = new ServerExecutionContext())
    {
        Study study = studyStorage.LoadStudy(scope.PersistenceContext);
        if (study == null)
            return;

        StudyXml studyXml = studyStorage.LoadStudyXml();

        // Instance-count validation runs in Default mode or when explicitly requested.
        bool validateInstanceCount =
            modes == StudyIntegrityValidationModes.Default ||
            (modes & StudyIntegrityValidationModes.InstanceCount) == StudyIntegrityValidationModes.InstanceCount;

        if (validateInstanceCount &&
            studyXml != null &&
            studyXml.NumberOfStudyRelatedInstances != study.NumberOfStudyRelatedInstances)
        {
            ValidationStudyInfo validationStudyInfo = new ValidationStudyInfo(study, studyStorage.ServerPartition);
            throw new StudyIntegrityValidationFailure(
                ValidationErrors.InconsistentObjectCount, validationStudyInfo,
                String.Format("Number of instances in database and xml do not match: {0} vs {1}.",
                              study.NumberOfStudyRelatedInstances,
                              studyXml.NumberOfStudyRelatedInstances));
        }
    }
}
/// <summary>
/// Validates the state of the study.
/// </summary>
/// <param name="context">Name of the application</param>
/// <param name="studyStorage">The study to validate</param>
/// <param name="modes">Specifying what validation to execute</param>
public void ValidateStudyState(String context, StudyStorageLocation studyStorage, StudyIntegrityValidationModes modes)
{
    Platform.CheckForNullReference(studyStorage, "studyStorage");

    if (modes == StudyIntegrityValidationModes.None)
        return;

    using (ServerExecutionContext scope = new ServerExecutionContext())
    {
        Study dbStudy = studyStorage.LoadStudy(scope.PersistenceContext);
        if (dbStudy != null)
        {
            StudyXml xml = studyStorage.LoadStudyXml();

            if (modes == StudyIntegrityValidationModes.Default ||
                (modes & StudyIntegrityValidationModes.InstanceCount) == StudyIntegrityValidationModes.InstanceCount)
            {
                // Compare the instance count recorded in the database against the study xml.
                if (xml != null && xml.NumberOfStudyRelatedInstances != dbStudy.NumberOfStudyRelatedInstances)
                {
                    ValidationStudyInfo info = new ValidationStudyInfo(dbStudy, studyStorage.ServerPartition);
                    throw new StudyIntegrityValidationFailure(
                        ValidationErrors.InconsistentObjectCount, info,
                        String.Format("Number of instances in database and xml do not match: {0} vs {1}.",
                                      dbStudy.NumberOfStudyRelatedInstances,
                                      xml.NumberOfStudyRelatedInstances));
                }
            }
        }
    }
}
/// <summary>
/// Routes a duplicate SOP instance through the duplicate-SOP processor after
/// logging which study (if known) the duplicate belongs to.
/// </summary>
private DicomProcessingResult HandleDuplicateFile(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomMessageBase message, string sourceFilename, StudyProcessWorkQueueData data)
{
    // Prefer the already-attached study; fall back to loading it from the database.
    var existingStudy = studyLocation.Study ?? studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);

    if (existingStudy == null)
    {
        // Study record not created yet — earlier images are still pending processing.
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                     sopInstanceUid, studyLocation.StudyInstanceUid);
    }
    else
    {
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (A#:{1} StudyUid:{2} Patient: {3} ID:{4})",
                     sopInstanceUid, existingStudy.AccessionNumber, existingStudy.StudyInstanceUid,
                     existingStudy.PatientsName, existingStudy.PatientId);
    }

    var sopProcessingContext =
        new SopInstanceProcessorContext(commandProcessor, studyLocation, _context.ContextID, _context.Request)
        {
            DuplicateProcessing = _context.DuplicateProcessing
        };

    return DuplicateSopProcessorHelper.Process(sopProcessingContext, message, data, sourceFilename);
}
/// <summary>
/// Builds the list of image-level update commands for a study, sourcing the current
/// values from the study XML or, when the XML reports no instances, from the database.
/// </summary>
/// <param name="storageLocation">Storage location of the study being updated.</param>
/// <param name="originalDicomAttributeProvider">Optional provider of the pre-update attribute
/// values, used to record each tag's original value; may be null.</param>
/// <returns>The update commands to apply; never null.</returns>
public IList<BaseImageLevelUpdateCommand> BuildCommands<TMappingObject>(StudyStorageLocation storageLocation, IDicomAttributeProvider originalDicomAttributeProvider)
{
    StudyXml studyXml = GetStudyXml(storageLocation);
    List<BaseImageLevelUpdateCommand> commandList = new List<BaseImageLevelUpdateCommand>();
    if (studyXml.NumberOfStudyRelatedInstances == 0)
    {
        // StudyXml is empty, resort to the db instead.
        // NOTE(review): LoadStudy may return null if the study record does not exist yet;
        // BuildCommandsFromEntity is assumed to tolerate a null study — confirm.
        Study study = storageLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);
        IList<BaseImageLevelUpdateCommand> cmds = BuildCommandsFromEntity(study, originalDicomAttributeProvider);

        // Record the original tag values so the update can be audited/rolled back.
        if (originalDicomAttributeProvider != null)
        {
            foreach (BaseImageLevelUpdateCommand cmd in cmds)
            {
                // BUGFIX: use a safe cast. The previous direct assignment either made the
                // null check below dead code (if every command implements the interface)
                // or failed to compile (if one does not).
                IUpdateImageTagCommand theCmd = cmd as IUpdateImageTagCommand;
                if (theCmd != null)
                {
                    DicomAttribute attribute;
                    if (originalDicomAttributeProvider.TryGetAttribute(theCmd.UpdateEntry.TagPath.Tag, out attribute))
                    {
                        theCmd.UpdateEntry.OriginalValue = attribute.ToString();
                    }
                }
            }
        }
        commandList.AddRange(cmds);
    }
    else
    {
        commandList.AddRange(BuildCommandsFromStudyXml(typeof(TMappingObject), studyXml, originalDicomAttributeProvider));
    }
    return commandList;
}
/// <summary>
/// Logs receipt of a duplicate SOP instance and hands it to the duplicate-SOP processor.
/// </summary>
private DicomProcessingResult HandleDuplicate(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomFile file)
{
    // Use the cached study when available; otherwise load it from the database.
    Study existingStudy = studyLocation.Study ?? studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);

    if (existingStudy == null)
    {
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                     sopInstanceUid, studyLocation.StudyInstanceUid);
    }
    else
    {
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (A#:{1} StudyUid:{2} Patient: {3} ID:{4})",
                     sopInstanceUid, existingStudy.AccessionNumber, existingStudy.StudyInstanceUid,
                     existingStudy.PatientsName, existingStudy.PatientId);
    }

    var sopProcessingContext = new SopProcessingContext(commandProcessor, studyLocation, _context.ContextID);
    return DuplicateSopProcessorHelper.Process(sopProcessingContext, file);
}
/// <summary>
/// Builds a reconcile-queue description contrasting the existing study's demographics
/// with the conflicting values in the incoming file; returns null when the study
/// cannot be loaded from the database.
/// </summary>
private ReconcileStudyQueueDescription CreateQueueEntryDescription(DicomFile file)
{
    using (var context = new ServerExecutionContext())
    {
        Study existing = _studyLocation.LoadStudy(context.PersistenceContext);
        if (existing == null)
            return null;

        return new ReconcileStudyQueueDescription
                   {
                       ExistingPatientId = existing.PatientId,
                       ExistingPatientName = existing.PatientsName,
                       ExistingAccessionNumber = existing.AccessionNumber,
                       ConflictingPatientName = file.DataSet[DicomTags.PatientsName].ToString(),
                       ConflictingPatientId = file.DataSet[DicomTags.PatientId].ToString(),
                       ConflictingAccessionNumber = file.DataSet[DicomTags.AccessionNumber].ToString()
                   };
    }
}
/// <summary>
/// Captures the study's current ("old") identifiers, patient demographics, and paths,
/// then derives the "new" values implied by the pending update commands.
/// Must complete before the update is applied; sets <c>_initialized</c> on success.
/// </summary>
private void Initialize()
{
    using (IPersistenceContext readContext = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
    {
        _backupDir = ServerExecutionContext.Current.BackupDirectory;

        _oldStudyPath = _oldStudyLocation.GetStudyPath();
        _oldStudyInstanceUid = _oldStudyLocation.StudyInstanceUid;
        _oldStudyFolder = _oldStudyLocation.StudyFolder;
        // Unless a command below overrides it, the study keeps its current UID.
        _newStudyInstanceUid = _oldStudyInstanceUid;

        _study = _oldStudyLocation.LoadStudy(readContext);
        _totalSopCount = _study.NumberOfStudyRelatedInstances;
        _curPatient = _study.LoadPatient(readContext);
        _oldPatientInfo = new PatientInfo
                              {
                                  Name = _curPatient.PatientsName,
                                  PatientId = _curPatient.PatientId,
                                  IssuerOfPatientId = _curPatient.IssuerOfPatientId
                              };

        // Start the "new" demographics as a copy of the old; commands may change them.
        _newPatientInfo = new PatientInfo(_oldPatientInfo);
        Debug.Assert(_newPatientInfo.Equals(_oldPatientInfo));

        // Scan the update commands for tags that affect the study UID or patient
        // identity. If multiple commands touch the same tag, the last one wins.
        foreach (BaseImageLevelUpdateCommand command in _commands)
        {
            ImageLevelUpdateEntry imageLevelUpdate = command.UpdateEntry;
            if (imageLevelUpdate == null)
            {
                continue;
            }

            if (imageLevelUpdate.TagPath.Tag.TagValue == DicomTags.StudyInstanceUid)
            {
                _newStudyInstanceUid = imageLevelUpdate.GetStringValue();
            }
            else if (imageLevelUpdate.TagPath.Tag.TagValue == DicomTags.PatientId)
            {
                _newPatientInfo.PatientId = imageLevelUpdate.GetStringValue();
            }
            else if (imageLevelUpdate.TagPath.Tag.TagValue == DicomTags.IssuerOfPatientId)
            {
                _newPatientInfo.IssuerOfPatientId = imageLevelUpdate.GetStringValue();
            }
            else if (imageLevelUpdate.TagPath.Tag.TagValue == DicomTags.PatientsName)
            {
                _newPatientInfo.Name = imageLevelUpdate.GetStringValue();
            }
        }

        Platform.CheckForNullReference(_newStudyInstanceUid, "_newStudyInstanceUid");

        // New study path: <filesystem>/<partition folder>/<old study folder>/<new study uid>.
        NewStudyPath = Path.Combine(_oldStudyLocation.FilesystemPath, _partition.PartitionFolder);
        NewStudyPath = Path.Combine(NewStudyPath, _oldStudyFolder);
        NewStudyPath = Path.Combine(NewStudyPath, _newStudyInstanceUid);

        _newPatient = FindPatient(_newPatientInfo, readContext);
        _patientInfoIsNotChanged = _newPatientInfo.Equals(_oldPatientInfo);

        Statistics.InstanceCount = _study.NumberOfStudyRelatedInstances;
        Statistics.StudySize = (ulong)_oldStudyLocation.LoadStudyXml().GetStudySize();

        // The study path will be changed. We will need to delete the original folder at the end.
        // May be too simple to test if two paths are the same. But let's assume it is good enough for 99% of the time.
        _deleteOriginalFolder = NewStudyPath != _oldStudyPath;
        _initialized = true;
    }
}
/// <summary>
/// Logs receipt of a duplicate SOP instance and delegates it to the
/// duplicate-SOP processor for resolution.
/// </summary>
private DicomProcessingResult HandleDuplicateFile(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomMessageBase message, string sourceFilename, StudyProcessWorkQueueData data)
{
    // Load the study from the database only when it is not already attached.
    Study dbStudy = studyLocation.Study ?? studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);

    if (dbStudy != null)
    {
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (A#:{1} StudyUid:{2} Patient: {3} ID:{4})",
                     sopInstanceUid, dbStudy.AccessionNumber, dbStudy.StudyInstanceUid,
                     dbStudy.PatientsName, dbStudy.PatientId);
    }
    else
    {
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                     sopInstanceUid, studyLocation.StudyInstanceUid);
    }

    var processingContext =
        new SopInstanceProcessorContext(commandProcessor, studyLocation, _context.ContextID, _context.Request)
        {
            DuplicateProcessing = _context.DuplicateProcessing
        };

    return DuplicateSopProcessorHelper.Process(processingContext, message, data, sourceFilename);
}
/// <summary>
/// Inserts a <see cref="WorkQueue"/> request to reprocess the study
/// </summary>
/// <param name="ctx">Update context used to obtain the database brokers</param>
/// <param name="reason">Audit reason recorded in the reprocess change log</param>
/// <param name="location">The study to reprocess</param>
/// <param name="additionalPaths">Extra paths to include in the reprocess; may be null</param>
/// <param name="scheduleTime">When the work-queue entry should execute</param>
/// <returns>The inserted <see cref="WorkQueue"/> entry, or null if the study could not be set to Idle</returns>
/// <exception cref="InvalidStudyStateOperationException">Study is in a state that reprocessing is not allowed</exception>
public WorkQueue ReprocessStudy(IUpdateContext ctx, String reason, StudyStorageLocation location, List<FilesystemDynamicPath> additionalPaths, DateTime scheduleTime)
{
    Platform.CheckForNullReference(location, "location");

    // A lossy study whose latest archive is lossless must be restored before
    // reprocessing, otherwise the lossless data would be lost.
    if (location.StudyStatusEnum.Equals(StudyStatusEnum.OnlineLossy))
    {
        if (location.IsLatestArchiveLossless)
        {
            // FIX: dropped the no-op String.Format wrapper (no format arguments).
            string message = "Study has been archived as lossless and is currently lossy. It must be restored first";
            throw new InvalidStudyStateOperationException(message);
        }
    }

    Study study = location.LoadStudy(ctx);

    // Unlock first.
    ILockStudy lockStudy = ctx.GetBroker<ILockStudy>();
    LockStudyParameters lockParms = new LockStudyParameters
                                        {
                                            StudyStorageKey = location.Key,
                                            QueueStudyStateEnum = QueueStudyStateEnum.Idle
                                        };
    if (!lockStudy.Execute(lockParms) || !lockParms.Successful)
    {
        // Note: according to the stored proc, setting study state to Idle always succeeds so
        // this will never happen
        return null;
    }

    // Now relock into ReprocessScheduled state. If another process locks the study
    // before this occurs, the lock attempt fails and we abort with the failure reason.
    lockParms.QueueStudyStateEnum = QueueStudyStateEnum.ReprocessScheduled;
    if (!lockStudy.Execute(lockParms) || !lockParms.Successful)
    {
        throw new InvalidStudyStateOperationException(lockParms.FailureReason);
    }

    InsertWorkQueueParameters columns = new InsertWorkQueueParameters
                                            {
                                                ScheduledTime = scheduleTime,
                                                ServerPartitionKey = location.ServerPartitionKey,
                                                StudyStorageKey = location.Key,
                                                WorkQueueTypeEnum = WorkQueueTypeEnum.ReprocessStudy
                                            };

    ReprocessStudyQueueData queueData = new ReprocessStudyQueueData
                                            {
                                                State = new ReprocessStudyState { ExecuteAtLeastOnce = false },
                                                ChangeLog = new ReprocessStudyChangeLog
                                                                {
                                                                    Reason = reason,
                                                                    TimeStamp = Platform.Time,
                                                                    // Record the requesting user when one is available.
                                                                    User = (Thread.CurrentPrincipal is CustomPrincipal)
                                                                               ? (Thread.CurrentPrincipal as CustomPrincipal).Identity.Name
                                                                               : String.Empty
                                                                }
                                            };

    if (additionalPaths != null)
    {
        queueData.AdditionalFiles = additionalPaths.ConvertAll<string>(path => path.ToString());
    }

    columns.WorkQueueData = XmlUtils.SerializeAsXmlDoc(queueData);

    IInsertWorkQueue insertBroker = ctx.GetBroker<IInsertWorkQueue>();
    WorkQueue reprocessEntry = insertBroker.FindOne(columns);
    if (reprocessEntry != null)
    {
        if (study != null)
        {
            Platform.Log(LogLevel.Info, "Study Reprocess Scheduled for Study {0}, A#: {1}, Patient: {2}, ID={3}",
                         study.StudyInstanceUid, study.AccessionNumber, study.PatientsName, study.PatientId);
        }
        else
        {
            Platform.Log(LogLevel.Info, "Study Reprocess Scheduled for Study {0}.", location.StudyInstanceUid);
        }
    }
    return reprocessEntry;
}
/// <summary>
/// Logs receipt of a duplicate SOP instance and routes it to the
/// duplicate-SOP processor.
/// </summary>
private DicomProcessingResult HandleDuplicate(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomFile file)
{
    // Prefer the cached study reference; fall back to a database load.
    Study dbStudy = studyLocation.Study ?? studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);

    if (dbStudy != null)
    {
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (A#:{1} StudyUid:{2} Patient: {3} ID:{4})",
                     sopInstanceUid, dbStudy.AccessionNumber, dbStudy.StudyInstanceUid,
                     dbStudy.PatientsName, dbStudy.PatientId);
    }
    else
    {
        Platform.Log(LogLevel.Info,
                     "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                     sopInstanceUid, studyLocation.StudyInstanceUid);
    }

    var processingContext = new SopProcessingContext(commandProcessor, studyLocation, _context.ContextID);
    return DuplicateSopProcessorHelper.Process(processingContext, file);
}
/// <summary>
/// Inserts a <see cref="WorkQueue"/> request to reprocess the study
/// </summary>
/// <param name="ctx">Update context used to obtain the database brokers</param>
/// <param name="reason">Audit reason recorded in the reprocess change log</param>
/// <param name="location">The study to reprocess</param>
/// <param name="additionalPaths">Extra paths to include in the reprocess; may be null</param>
/// <param name="scheduleTime">When the work-queue entry should execute</param>
/// <returns>The inserted <see cref="WorkQueue"/> entry, or null if the study could not be set to Idle</returns>
/// <exception cref="InvalidStudyStateOperationException">Study is in a state that reprocessing is not allowed</exception>
public WorkQueue ReprocessStudy(IUpdateContext ctx, String reason, StudyStorageLocation location, List<FilesystemDynamicPath> additionalPaths, DateTime scheduleTime)
{
    Platform.CheckForNullReference(location, "location");

    // A lossy study whose latest archive is lossless must be restored before
    // reprocessing, otherwise the lossless data would be lost.
    if (location.StudyStatusEnum.Equals(StudyStatusEnum.OnlineLossy))
    {
        if (location.IsLatestArchiveLossless)
        {
            // FIX: dropped the no-op String.Format wrapper (no format arguments).
            string message = "Study has been archived as lossless and is currently lossy. It must be restored first";
            throw new InvalidStudyStateOperationException(message);
        }
    }

    Study study = location.LoadStudy(ctx);

    // Unlock first.
    ILockStudy lockStudy = ctx.GetBroker<ILockStudy>();
    LockStudyParameters lockParms = new LockStudyParameters
                                        {
                                            StudyStorageKey = location.Key,
                                            QueueStudyStateEnum = QueueStudyStateEnum.Idle
                                        };
    if (!lockStudy.Execute(lockParms) || !lockParms.Successful)
    {
        // Note: according to the stored proc, setting study state to Idle always succeeds so
        // this will never happen
        return null;
    }

    // Now relock into ReprocessScheduled state. If another process locks the study
    // before this occurs, the lock attempt fails and we abort with the failure reason.
    lockParms.QueueStudyStateEnum = QueueStudyStateEnum.ReprocessScheduled;
    if (!lockStudy.Execute(lockParms) || !lockParms.Successful)
    {
        throw new InvalidStudyStateOperationException(lockParms.FailureReason);
    }

    InsertWorkQueueParameters columns = new InsertWorkQueueParameters
                                            {
                                                ScheduledTime = scheduleTime,
                                                ServerPartitionKey = location.ServerPartitionKey,
                                                StudyStorageKey = location.Key,
                                                WorkQueueTypeEnum = WorkQueueTypeEnum.ReprocessStudy
                                            };

    ReprocessStudyQueueData queueData = new ReprocessStudyQueueData
                                            {
                                                State = new ReprocessStudyState { ExecuteAtLeastOnce = false },
                                                ChangeLog = new ReprocessStudyChangeLog
                                                                {
                                                                    Reason = reason,
                                                                    TimeStamp = Platform.Time,
                                                                    // Record the requesting user when one is available.
                                                                    User = (Thread.CurrentPrincipal is CustomPrincipal)
                                                                               ? (Thread.CurrentPrincipal as CustomPrincipal).Identity.Name
                                                                               : String.Empty
                                                                }
                                            };

    if (additionalPaths != null)
        queueData.AdditionalFiles = additionalPaths.ConvertAll<string>(path => path.ToString());

    columns.WorkQueueData = XmlUtils.SerializeAsXmlDoc(queueData);

    IInsertWorkQueue insertBroker = ctx.GetBroker<IInsertWorkQueue>();
    WorkQueue reprocessEntry = insertBroker.FindOne(columns);
    if (reprocessEntry != null)
    {
        if (study != null)
        {
            Platform.Log(LogLevel.Info, "Study Reprocess Scheduled for Study {0}, A#: {1}, Patient: {2}, ID={3}",
                         study.StudyInstanceUid, study.AccessionNumber, study.PatientsName, study.PatientId);
        }
        else
        {
            Platform.Log(LogLevel.Info, "Study Reprocess Scheduled for Study {0}.", location.StudyInstanceUid);
        }
    }
    return reprocessEntry;
}