/// <summary>
/// Called when a web-based study edit begins: snapshots the study as it
/// exists before the edit and records a description of the requested change
/// (commands, user, reason, time, edit type) for history logging.
/// </summary>
public void OnStudyEditing(WebEditStudyContext context)
{
    // Capture the pre-edit state of the study.
    _studyInfo = StudyInformation.CreateFrom(context.OriginalStudy);

    // Describe who is editing, when, why, and which commands will run.
    var description = new WebEditStudyHistoryChangeDescription
    {
        UpdateCommands = context.EditCommands,
        TimeStamp = Platform.Time,
        UserId = context.UserId,
        Reason = context.Reason,
        EditType = context.EditType
    };
    _changeDesc = description;
}
/// <summary>
/// Builds a <see cref="StudyInformation"/> snapshot from a <see cref="Study"/>
/// entity, adding one <see cref="SeriesInformation"/> entry per series.
/// </summary>
/// <param name="study">The study entity to snapshot.</param>
/// <returns>A populated <see cref="StudyInformation"/> instance.</returns>
public static StudyInformation CreateFrom(Study study)
{
    var info = new StudyInformation(new ServerEntityAttributeProvider(study));

    // Wrap each series entity so its attributes can be read generically.
    foreach (Series series in study.Series.Values)
    {
        info.Add(new SeriesInformation(new ServerEntityAttributeProvider(series)));
    }

    return info;
}
/// <summary>
/// Processes one duplicate-SOP work queue item. When no UIDs remain, cleans up
/// the duplicate storage folder and completes the entry; otherwise processes
/// the pending UIDs, optionally logging study history, and leaves the entry
/// Pending for the next pass.
/// </summary>
/// <param name="item">The work queue entry being processed.</param>
protected override void ProcessItem(Model.WorkQueue item)
{
    // Preconditions: a storage location and the study must already be resolved.
    Platform.CheckMemberIsSet(StorageLocation, "StorageLocation");
    Platform.CheckForNullReference(Study, "Study doesn't exist");

    if (WorkQueueUidList.Count == 0)
    {
        // we are done. Just need to cleanup the duplicate folder
        Platform.Log(LogLevel.Info, "{0} is completed. Cleaning up duplicate storage folder. (GUID={1}, action={2})",
                     item.WorkQueueTypeEnum, item.GetKey().Key, _processDuplicateEntry.QueueData.Action);

        CleanUpReconcileFolders();

        // Complete the entry and reset its queue state in the database.
        PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
    }
    else
    {
        Platform.Log(LogLevel.Info, "Processing {0} entry (GUID={1}, action={2})",
                     item.WorkQueueTypeEnum, item.GetKey().Key, _processDuplicateEntry.QueueData.Action);

        // The duplicate folder must still be on disk; its contents are the inputs.
        Platform.CheckTrue(Directory.Exists(DuplicateFolder),
                           String.Format("Duplicate Folder {0} doesn't exist.", DuplicateFolder));

        LogWorkQueueInfo();

        EnsureStorageLocationIsWritable(StorageLocation);

        _currentStudyInfo = StudyInformation.CreateFrom(Study);

        ImageSetDetails duplicateSopDetails = null;

        // If deleting duplicates then don't log the history
        // NOTE(review): details are loaded BEFORE processing so the history reflects
        // the duplicate set as it existed prior to this pass — confirm intent.
        if (_processDuplicateEntry.QueueData.Action != ProcessDuplicateAction.Delete && !HistoryLogged)
        {
            duplicateSopDetails = LoadDuplicateDetails();
        }

        try
        {
            UpdateStudyOrDuplicates();

            int count = ProcessUidList();

            // If deleting duplicates then don't log the history
            // Only log when something was actually processed this pass.
            if (_processDuplicateEntry.QueueData.Action != ProcessDuplicateAction.Delete
                && !HistoryLogged && duplicateSopDetails != null && count > 0)
            {
                LogHistory(duplicateSopDetails);
            }

            // Leave the entry Pending: remaining UIDs are handled on a later pass
            // until WorkQueueUidList is empty and the completion branch above runs.
            PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
        }
        finally
        {
            // Always persist queue-data changes, even if processing threw.
            UpdateQueueData();
        }
    }
}
/// <summary>
/// Initializes the image-set details, building the study information from
/// the supplied DICOM attribute source.
/// </summary>
/// <param name="attributeProvider">Source of the study's DICOM attributes.</param>
public ImageSetDetails(IDicomAttributeProvider attributeProvider)
{
    this.StudyInfo = new StudyInformation(attributeProvider);
}
/// <summary>
/// Initializes the image-set details with an empty study information object
/// (e.g. for XML deserialization).
/// </summary>
public ImageSetDetails()
{
    this.StudyInfo = new StudyInformation();
}
/// <summary>
/// Wraps a <see cref="StudyIntegrityQueue"/> entry: deserializes the
/// reconcile work-queue details and the conflicting image-set descriptor,
/// then loads the existing study's information from storage.
/// </summary>
/// <param name="queue">The integrity-queue entry to wrap.</param>
public InconsistentDataSIQRecord(StudyIntegrityQueue queue)
{
    _queueItem = queue;

    // Details of the images that conflict with the existing study.
    var workQueueData = XmlUtils.Deserialize<ReconcileStudyWorkQueueData>(queue.Details);
    _conflictingImageDetails = workQueueData.Details;
    _conflictingImageDescriptor = XmlUtils.Deserialize<ImageSetDescriptor>(queue.StudyData);

    // Load the study currently in storage so its info can be compared
    // against the conflicting image set.
    StudyStorage storage = StudyStorage.Load(HttpContextData.Current.ReadContext, queue.StudyStorageKey);
    Study existingStudy = storage.LoadStudy(HttpContextData.Current.ReadContext);
    _existingStudyInfo = new StudyInformation(new ServerEntityAttributeProvider(existingStudy));
}
/// <summary>
/// Called before a series is deleted via the web interface: snapshots the
/// parent study's information for later history logging.
/// </summary>
/// <param name="context">Deletion context providing the storage location.</param>
/// <param name="series">The series about to be deleted (unused here; the
/// snapshot covers the entire study).</param>
public void OnSeriesDeleting(WebDeleteProcessorContext context, Series series)
{
    Study parentStudy = context.StorageLocation.Study;
    _studyInfo = StudyInformation.CreateFrom(parentStudy);
}
/// <summary>
/// Builds a <see cref="StudyInformation"/> snapshot from a <see cref="Study"/>
/// entity, adding one <see cref="SeriesInformation"/> entry per series.
/// </summary>
/// <param name="study">The study entity to snapshot.</param>
/// <returns>A populated <see cref="StudyInformation"/> instance.</returns>
public static StudyInformation CreateFrom(Study study)
{
    var info = new StudyInformation(new ServerEntityAttributeProvider(study));

    // Wrap each series entity so its attributes can be read generically.
    foreach (Series series in study.Series.Values)
    {
        info.Add(new SeriesInformation(new ServerEntityAttributeProvider(series)));
    }

    return info;
}
/// <summary>
/// Initializes the image-set details, building the study information from
/// the supplied DICOM attribute source.
/// </summary>
/// <param name="attributeProvider">Source of the study's DICOM attributes.</param>
public ImageSetDetails(IDicomAttributeProvider attributeProvider)
{
    this.StudyInfo = new StudyInformation(attributeProvider);
}
/// <summary>
/// Initializes the image-set details with an empty study information object
/// (e.g. for XML deserialization).
/// </summary>
public ImageSetDetails()
{
    this.StudyInfo = new StudyInformation();
}