/// <summary>
/// Processes a single ProcessDuplicate work-queue entry.
/// When the UID list is empty the entry is finished: the duplicate/reconcile
/// folders are removed and the item is marked Complete. Otherwise the pending
/// duplicate SOPs are processed according to the queued action, and the item
/// is left Pending so remaining UIDs are picked up on the next pass.
/// </summary>
/// <param name="item">The work-queue entry being processed. Must have a valid
/// storage location and an existing study (checked below).</param>
protected override void ProcessItem(Model.WorkQueue item)
{
    // Preconditions: the processor must have resolved the storage location
    // and the study record before this method runs.
    Platform.CheckMemberIsSet(StorageLocation, "StorageLocation");
    Platform.CheckForNullReference(Study, "Study doesn't exist");

    if (WorkQueueUidList.Count == 0)
    {
        // we are done. Just need to cleanup the duplicate folder
        Platform.Log(LogLevel.Info, "{0} is completed. Cleaning up duplicate storage folder. (GUID={1}, action={2})",
                     item.WorkQueueTypeEnum, item.GetKey().Key, _processDuplicateEntry.QueueData.Action);

        CleanUpReconcileFolders();

        // Complete + ResetQueueState: this entry will not be scheduled again.
        PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
    }
    else
    {
        Platform.Log(LogLevel.Info, "Processing {0} entry (GUID={1}, action={2})",
                     item.WorkQueueTypeEnum, item.GetKey().Key, _processDuplicateEntry.QueueData.Action);

        // The duplicate files must still be on disk to be processed.
        Platform.CheckTrue(Directory.Exists(DuplicateFolder), String.Format("Duplicate Folder {0} doesn't exist.", DuplicateFolder));

        LogWorkQueueInfo();

        EnsureStorageLocationIsWritable(StorageLocation);

        // Snapshot of the study taken BEFORE any updates, used for the history record.
        _currentStudyInfo = StudyInformation.CreateFrom(Study);

        ImageSetDetails duplicateSopDetails = null;

        // If deleting duplicates then don't log the history
        // (details are loaded up-front, before ProcessUidList consumes the files).
        if (_processDuplicateEntry.QueueData.Action != ProcessDuplicateAction.Delete
            && !HistoryLogged)
        {
            duplicateSopDetails = LoadDuplicateDetails();
        }

        try
        {
            UpdateStudyOrDuplicates();

            int count = ProcessUidList();

            // If deleting duplicates then don't log the history
            // Only log once (HistoryLogged) and only if something was actually processed.
            if (_processDuplicateEntry.QueueData.Action != ProcessDuplicateAction.Delete
                && !HistoryLogged && duplicateSopDetails != null && count > 0)
            {
                LogHistory(duplicateSopDetails);
            }

            // Pending: more UIDs may remain; the entry will be rescheduled.
            PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
        }
        finally
        {
            // Always persist queue-data state, even if processing threw.
            UpdateQueueData();
        }
    }
}
/// <summary>
/// Builds an <see cref="InconsistentDataSIQRecord"/> from a study-integrity
/// queue entry by deserializing the conflicting-image data embedded in the
/// entry and loading the existing study it conflicts with.
/// </summary>
/// <param name="queue">The study-integrity queue entry to wrap.</param>
public InconsistentDataSIQRecord(StudyIntegrityQueue queue)
{
    _queueItem = queue;

    // The queue entry's Details column carries the reconcile work-queue payload.
    ReconcileStudyWorkQueueData workQueueData =
        XmlUtils.Deserialize<ReconcileStudyWorkQueueData>(queue.Details);
    _conflictingImageDetails = workQueueData.Details;

    // StudyData describes the conflicting image set itself.
    _conflictingImageDescriptor =
        XmlUtils.Deserialize<ImageSetDescriptor>(queue.StudyData);

    // Load the study already in the system that this entry conflicts with.
    StudyStorage studyStorage =
        StudyStorage.Load(HttpContextData.Current.ReadContext, queue.StudyStorageKey);
    Study existingStudy = studyStorage.LoadStudy(HttpContextData.Current.ReadContext);
    _existingStudyInfo = new StudyInformation(new ServerEntityAttributeProvider(existingStudy));
}
/// <summary>
/// Loads every duplicate DICOM file referenced by this entry's work-queue
/// UIDs and aggregates them into a single <see cref="ImageSetDetails"/>.
/// </summary>
/// <returns>
/// The aggregated image-set details, seeded from the first file's data set;
/// <c>null</c> when there are no work-queue UIDs.
/// </returns>
private ImageSetDetails LoadDuplicateDetails()
{
    ImageSetDetails imageSet = null;
    foreach (WorkQueueUid queueUid in LoadAllWorkQueueUids())
    {
        DicomFile dicomFile = LoadDuplicateDicomFile(queueUid, true);

        // First file seeds the image-set descriptor; every file (including
        // the first) is then inserted into it.
        imageSet = imageSet ?? new ImageSetDetails(dicomFile.DataSet);
        imageSet.InsertFile(dicomFile);
    }
    return imageSet;
}
/// <summary>
/// Persists a study-history record describing how the duplicate image set
/// was handled, then marks history as logged so it is not written twice.
/// </summary>
/// <param name="details">The duplicate image-set details to record.</param>
/// <exception cref="ApplicationException">
/// Thrown when the history record cannot be inserted.
/// NOTE(review): ApplicationException is discouraged by .NET guidelines, but
/// callers may catch it specifically — left unchanged. TODO confirm.
/// </exception>
private void LogHistory(ImageSetDetails details)
{
    IPersistentStore store = PersistentStoreRegistry.GetDefaultStore();
    using (IUpdateContext updateContext = store.OpenUpdateContext(UpdateContextSyncMode.Flush))
    {
        Platform.Log(LogLevel.Info, "Logging study history record...");

        var historyBroker = updateContext.GetBroker<IStudyHistoryEntityBroker>();
        StudyHistory insertedEntry = historyBroker.Insert(CreateStudyHistoryRecord(details));

        // Guard clause: bail out (context rolls back on dispose) if the insert failed.
        if (insertedEntry == null)
            throw new ApplicationException("Unable to log study history record");

        updateContext.Commit();
    }

    // Only reached on successful commit.
    HistoryLogged = true;
}
/// <summary>
/// Builds the update columns for a Duplicate-type study-history record,
/// including a serialized change log describing the action taken, the study
/// snapshot, and the update commands applied.
/// </summary>
/// <param name="details">The duplicate image-set details to embed in the change log.</param>
/// <returns>The populated <see cref="StudyHistoryUpdateColumns"/> ready for insertion.</returns>
private StudyHistoryUpdateColumns CreateStudyHistoryRecord(ImageSetDetails details)
{
    var historyColumns = new StudyHistoryUpdateColumns();
    historyColumns.InsertTime = Platform.Time;
    historyColumns.StudyHistoryTypeEnum = StudyHistoryTypeEnum.Duplicate;
    // Source and destination are the same storage: the duplicates were
    // reconciled into the existing study location.
    historyColumns.StudyStorageKey = StorageLocation.GetKey();
    historyColumns.DestStudyStorageKey = StorageLocation.GetKey();
    historyColumns.StudyData = XmlUtils.SerializeAsXmlDoc(_currentStudyInfo);

    // Audit trail: what was done, to what, by whom.
    var auditLog = new ProcessDuplicateChangeLog
    {
        Action = _processDuplicateEntry.QueueData.Action,
        DuplicateDetails = details,
        StudySnapShot = _currentStudyInfo,
        StudyUpdateCommands = _studyUpdateCommands,
        UserName = _processDuplicateEntry.QueueData.UserName
    };
    historyColumns.ChangeDescription = XmlUtils.SerializeAsXmlDoc(auditLog);

    return historyColumns;
}