/// <summary>
/// Processes a ProcessDuplicate work queue entry: when no UIDs remain, the duplicate
/// storage folder is cleaned up and the entry completed; otherwise the queued duplicate
/// SOPs are processed and, unless duplicates are being deleted, the action is recorded
/// in the study history.
/// </summary>
protected override void ProcessItem(Model.WorkQueue item)
{
    Platform.CheckMemberIsSet(StorageLocation, "StorageLocation");
    Platform.CheckForNullReference(Study, "Study doesn't exist");

    if (WorkQueueUidList.Count == 0)
    {
        // we are done. Just need to cleanup the duplicate folder
        Platform.Log(LogLevel.Info, "{0} is completed. Cleaning up duplicate storage folder. (GUID={1}, action={2})",
                     item.WorkQueueTypeEnum, item.GetKey().Key, _processDuplicateEntry.QueueData.Action);
        CleanUpReconcileFolders();
        PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
        return;
    }

    Platform.Log(LogLevel.Info, "Processing {0} entry (GUID={1}, action={2})",
                 item.WorkQueueTypeEnum, item.GetKey().Key, _processDuplicateEntry.QueueData.Action);
    Platform.CheckTrue(Directory.Exists(DuplicateFolder),
                       String.Format("Duplicate Folder {0} doesn't exist.", DuplicateFolder));

    LogWorkQueueInfo();
    EnsureStorageLocationIsWritable(StorageLocation);
    _currentStudyInfo = StudyInformation.CreateFrom(Study);

    // If deleting duplicates then don't log the history
    ImageSetDetails duplicateSopDetails = null;
    if (_processDuplicateEntry.QueueData.Action != ProcessDuplicateAction.Delete && !HistoryLogged)
    {
        duplicateSopDetails = LoadDuplicateDetails();
    }

    try
    {
        UpdateStudyOrDuplicates();
        int count = ProcessUidList();

        // If deleting duplicates then don't log the history.
        // HistoryLogged is deliberately re-checked here (it may change during processing).
        if (_processDuplicateEntry.QueueData.Action != ProcessDuplicateAction.Delete
            && !HistoryLogged && duplicateSopDetails != null && count > 0)
        {
            LogHistory(duplicateSopDetails);
        }

        PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
    }
    finally
    {
        // Persist queue-state changes whether or not processing succeeded.
        UpdateQueueData();
    }
}
/// <summary>
/// Runs a single work queue item through its processor, failing the item on any
/// unhandled exception; always signals the parent thread and disposes the processor.
/// </summary>
/// <param name="processor">The processor to execute (disposed on exit).</param>
/// <param name="queueItem">The work queue entry being processed.</param>
private void ExecuteProcessor(IWorkQueueItemProcessor processor, Model.WorkQueue queueItem)
{
    try
    {
        processor.Process(queueItem);
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e,
                     "Unexpected exception when processing WorkQueue item of type {0}. Failing Queue item. (GUID: {1})",
                     queueItem.WorkQueueTypeEnum, queueItem.GetKey());

        // Report the innermost message when one is available.
        String reason = e.InnerException != null ? e.InnerException.Message : e.Message;
        FailQueueItem(queueItem, reason);
    }
    finally
    {
        // Signal the parent thread, so it can query again
        _threadStop.Set();

        // Cleanup the processor
        processor.Dispose();
    }
}
/// <summary>
/// Builds a <see cref="WorkQueueDetails"/> snapshot for a generic work queue entry,
/// including pending UID/series counts and (when available) study/patient details.
/// </summary>
private static WorkQueueDetails CreateGeneralWorkQueueItemDetails(Model.WorkQueue item)
{
    StudyStorageLocation storage = WorkQueueController.GetLoadStorageLocation(item);

    var detail = new WorkQueueDetails
                     {
                         Key = item.Key,
                         ScheduledDateTime = item.ScheduledTime,
                         ExpirationTime = item.ExpirationTime,
                         InsertTime = item.InsertTime,
                         FailureCount = item.FailureCount,
                         Type = item.WorkQueueTypeEnum,
                         Status = item.WorkQueueStatusEnum,
                         Priority = item.WorkQueuePriorityEnum,
                         FailureDescription = item.FailureDescription,
                         ServerDescription = item.ProcessorID,
                         StorageLocationPath = storage.GetStudyPath()
                     };

    // Count pending instances and distinct series from the queued UIDs.
    var uidAdaptor = new WorkQueueUidAdaptor();
    var uidCriteria = new WorkQueueUidSelectCriteria();
    uidCriteria.WorkQueueKey.EqualTo(item.GetKey());
    IList<WorkQueueUid> pendingUids = uidAdaptor.Get(uidCriteria);

    var seriesMap = new Hashtable();
    foreach (WorkQueueUid uid in pendingUids)
    {
        if (!seriesMap.ContainsKey(uid.SeriesInstanceUid))
            seriesMap.Add(uid.SeriesInstanceUid, uid.SopInstanceUid);
    }
    detail.NumInstancesPending = pendingUids.Count;
    detail.NumSeriesPending = seriesMap.Count;

    // Fetch the study and patient info. The Study record may not exist
    // until the images have been processed.
    var storageAdaptor = new StudyStorageAdaptor();
    StudyStorage studyStorage = storageAdaptor.Get(item.StudyStorageKey);

    var studyAdaptor = new StudyAdaptor();
    var studyCriteria = new StudySelectCriteria();
    studyCriteria.StudyInstanceUid.EqualTo(studyStorage.StudyInstanceUid);
    studyCriteria.ServerPartitionKey.EqualTo(item.ServerPartitionKey);
    Study study = studyAdaptor.GetFirst(studyCriteria);
    if (study != null)
    {
        var studyAssembler = new StudyDetailsAssembler();
        detail.Study = studyAssembler.CreateStudyDetail(study);
    }

    return detail;
}
/// <summary>
/// Simple routine for failing a work queue item.
/// </summary>
/// <param name="item">The item to fail.</param>
/// <param name="failureDescription">The reason for the failure.</param>
protected override void FailQueueItem(Model.WorkQueue item, string failureDescription)
{
    DBUpdateTime.Add(
        delegate
        {
            using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
                IUpdateWorkQueue update = updateContext.GetBroker<IUpdateWorkQueue>();
                var parameters = new UpdateWorkQueueParameters
                                     {
                                         ProcessorID = ServerPlatform.ProcessorId,
                                         WorkQueueKey = item.GetKey(),
                                         StudyStorageKey = item.StudyStorageKey,
                                         FailureCount = item.FailureCount + 1,
                                         FailureDescription = failureDescription
                                     };

                Platform.Log(LogLevel.Error, "Failing {0} WorkQueue entry ({1}): {2}",
                             item.WorkQueueTypeEnum, item.GetKey(), failureDescription);

                // Mark Failed permanently; keep the row around for a day before expiry.
                parameters.WorkQueueStatusEnum = WorkQueueStatusEnum.Failed;
                parameters.ScheduledTime = Platform.Time;
                parameters.ExpirationTime = Platform.Time.AddDays(1);

                if (update.Execute(parameters))
                {
                    updateContext.Commit();
                }
                else
                {
                    Platform.Log(LogLevel.Error, "Unable to update {0} WorkQueue GUID: {1}",
                                 item.WorkQueueTypeEnum, item.GetKey().ToString());
                }
            }
        }
        );
}
/// <summary>
/// Raises an application alert for a failed work queue item, unless alerting is
/// disabled for this queue type and the level is below Critical.
/// </summary>
public void RaiseAlert(Model.WorkQueue queueItem, AlertLevel level, string message)
{
    WorkQueueTypeProperties properties = _propertiesDictionary[queueItem.WorkQueueTypeEnum];

    bool shouldAlert = properties.AlertFailedWorkQueue || level == AlertLevel.Critical;
    if (!shouldAlert)
        return;

    ServerPlatform.Alert(AlertCategory.Application, level,
                         queueItem.WorkQueueTypeEnum.ToString(), AlertTypeCodes.UnableToProcess,
                         GetWorkQueueContextData(queueItem), TimeSpan.Zero,
                         "Work Queue item failed: Type={0}, GUID={1}: {2}",
                         queueItem.WorkQueueTypeEnum, queueItem.GetKey(), message);
}
/// <summary>
/// Marks the reprocess state as executed-at-least-once and persists the queue data
/// back onto the WorkQueue row.
/// </summary>
private void SaveState(Model.WorkQueue item, ReprocessStudyQueueData queueData)
{
    // Update the queue state
    using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
    {
        queueData.State.ExecuteAtLeastOnce = true;

        var broker = updateContext.GetBroker<IWorkQueueEntityBroker>();
        // NOTE(review): the serialized payload is the field _queueData, not the queueData
        // parameter mutated above — presumably they reference the same object; confirm at call sites.
        var columns = new WorkQueueUpdateColumns { Data = XmlUtils.SerializeAsXmlDoc(_queueData) };
        broker.Update(item.GetKey(), columns);
        updateContext.Commit();
    }
}
/// <summary>
/// Pops up a dialog box to let user to reschedule a work queue item.
/// Failed and in-progress entries cannot be rescheduled; an error message is shown instead.
/// </summary>
private void RescheduleWorkQueueItem(Model.WorkQueue item)
{
    if (item == null)
    {
        // The selected row no longer maps to a live work queue entry.
        MessageBox.BackgroundCSS = string.Empty;
        MessageBox.Message = SR.SelectedWorkQueueNoLongerOnTheList;
        MessageBox.MessageStyle = "color: red; font-weight: bold;";
        MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
        MessageBox.Show();
        return;
    }

    ScheduleWorkQueueDialog.WorkQueueKeys = new List<ServerEntityKey> { item.GetKey() };

    // Nothing more to do when the details panel has no entry loaded.
    if (WorkQueueItemDetailsPanel.WorkQueue == null)
        return;

    if (WorkQueueItemDetailsPanel.WorkQueue.WorkQueueStatusEnum == WorkQueueStatusEnum.Failed)
    {
        MessageBox.Message = SR.WorkQueueRescheduleFailed_ItemHasFailed;
        MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
        MessageBox.Show();
        return;
    }

    if (WorkQueueItemDetailsPanel.WorkQueue.WorkQueueStatusEnum == WorkQueueStatusEnum.InProgress)
    {
        MessageBox.Message = SR.WorkQueueBeingProcessed;
        MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
        MessageBox.Show();
        return;
    }

    ScheduleWorkQueueDialog.Show();
}
/// <summary>
/// Copy-constructs a <see cref="ReconcileStudyWorkQueue"/> from a generic work queue
/// record, which must be of type ReconcileStudy.
/// </summary>
/// <param name="workqueue">The ReconcileStudy work queue record to copy from.</param>
public ReconcileStudyWorkQueue(Model.WorkQueue workqueue)
{
    Platform.CheckTrue(workqueue.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.ReconcileStudy),
                       String.Format("Cannot copy data from Work Queue record of type {0}",
                                     workqueue.WorkQueueTypeEnum));

    this.SetKey(workqueue.GetKey());
    this.Data = workqueue.Data;
    this.InsertTime = workqueue.InsertTime;
    this.DeviceKey = workqueue.DeviceKey;
    this.ExpirationTime = workqueue.ExpirationTime;
    this.FailureCount = workqueue.FailureCount;
    this.FailureDescription = workqueue.FailureDescription;
    this.GroupID = workqueue.GroupID;
    this.ProcessorID = workqueue.ProcessorID;
    this.ScheduledTime = workqueue.ScheduledTime;
    this.ServerPartitionKey = workqueue.ServerPartitionKey;
    this.StudyHistoryKey = workqueue.StudyHistoryKey;
    this.StudyStorageKey = workqueue.StudyStorageKey;
    this.WorkQueuePriorityEnum = workqueue.WorkQueuePriorityEnum;
    this.WorkQueueStatusEnum = workqueue.WorkQueueStatusEnum;
    // BUG FIX: was a no-op self-assignment (this.WorkQueueTypeEnum = this.WorkQueueTypeEnum),
    // leaving the copied record's type unset; copy it from the source record.
    this.WorkQueueTypeEnum = workqueue.WorkQueueTypeEnum;
}
/// <summary>
/// Handler invoked when the user confirms deleting a work queue item. Refuses to delete
/// an entry that is currently being processed; reports the outcome through the message
/// box and the <see cref="WorkQueueItemDeleted"/> event.
/// </summary>
private void PreDeleteConfirmDialog_Confirmed(object data)
{
    Hide();

    var key = data as ServerEntityKey;
    if (key == null)
        return;

    var adaptor = new WorkQueueAdaptor();
    Model.WorkQueue item = adaptor.Get(key);
    if (item == null)
    {
        // Entry disappeared (e.g. processed/deleted) between selection and confirmation.
        MessageBox.Message = SR.WorkQueueNotAvailable;
        MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
        MessageBox.Show();
        return;
    }

    if (item.WorkQueueStatusEnum == WorkQueueStatusEnum.InProgress)
    {
        MessageBox.Message = SR.WorkQueueBeingProcessed_CannotDelete;
        MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
        MessageBox.Show();
        return;
    }

    try
    {
        var controller = new WorkQueueController();
        var items = new List<Model.WorkQueue> { item };
        bool successful = controller.DeleteWorkQueueItems(items);
        if (successful)
        {
            Platform.Log(LogLevel.Info, "Work Queue item deleted by user : Item Key={0}", item.GetKey().Key);
            if (WorkQueueItemDeleted != null)
                WorkQueueItemDeleted(item);
            if (OnHide != null)
                OnHide();
        }
        else
        {
            // BUG FIX: log messages previously said "PreResetConfirmDialog_Confirmed"
            // (copy/paste from the reset handler), misattributing delete failures in the logs.
            Platform.Log(LogLevel.Error,
                         "PreDeleteConfirmDialog_Confirmed: Unable to delete work queue item. GUID={0}",
                         item.GetKey().Key);
            MessageBox.Message = SR.WorkQueueDeleteFailed;
            MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
            MessageBox.Show();
        }
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error,
                     "PreDeleteConfirmDialog_Confirmed: Unable to delete work queue item. GUID={0} : {1}",
                     item.GetKey().Key, e.StackTrace);
        MessageBox.Message = String.Format(SR.WorkQueueDeleteFailed_WithException, e.Message);
        MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
        MessageBox.Show();
    }
}
/// <summary>
/// Process a <see cref="WorkQueue"/> item of type AutoRoute: sends the queued SOP
/// instances to the destination device via a Storage SCU, tracking per-instance
/// success/failure and honoring a pending cancel (shutdown) request.
/// </summary>
protected override void ProcessItem(Model.WorkQueue item)
{
    // An expired entry with nothing left to send is completed and removed.
    if (WorkQueueItem.ScheduledTime >= WorkQueueItem.ExpirationTime && !HasPendingItems)
    {
        Platform.Log(LogLevel.Debug, "Removing Idle {0} entry : {1}", item.WorkQueueTypeEnum, item.GetKey().Key);
        base.PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.None);
        return;
    }

    if (!HasPendingItems)
    {
        // nothing to process, change to idle state
        PostProcessing(item, WorkQueueProcessorStatus.Idle, WorkQueueProcessorDatabaseUpdate.None);
        return;
    }

    Platform.Log(LogLevel.Info,
                 "Moving study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} to {5}...",
                 Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber,
                 ServerPartition.Description, DestinationDevice.AeTitle);

    // Load remote device information from the database.
    Device device = DestinationDevice;
    if (device == null)
    {
        item.FailureDescription = String.Format("Unknown auto-route destination \"{0}\"", item.DeviceKey);
        Platform.Log(LogLevel.Error, item.FailureDescription);
        PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal); // Fatal Error
        return;
    }

    if (device.Dhcp && device.IpAddress.Length == 0)
    {
        item.FailureDescription = String.Format("Auto-route destination is a DHCP device with no known IP address: \"{0}\"", device.AeTitle);
        Platform.Log(LogLevel.Error, item.FailureDescription);
        PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal); // Fatal error
        return;
    }

    // Now setup the StorageSCU component
    int sendCounter = 0;
    using (ImageServerStorageScu scu = new ImageServerStorageScu(ServerPartition, device))
    {
        // set the preferred syntax lists
        using (ServerExecutionContext context = new ServerExecutionContext())
        {
            scu.LoadPreferredSyntaxes(context.ReadContext);
        }

        // Load the Instances to Send into the SCU component
        scu.AddStorageInstanceList(InstanceList);

        // Set an event to be called when each image is transferred
        scu.ImageStoreCompleted += delegate(Object sender, StorageInstance instance)
        {
            if (instance.SendStatus.Status == DicomState.Success
                || instance.SendStatus.Status == DicomState.Warning
                || instance.SendStatus.Equals(DicomStatuses.SOPClassNotSupported))
            {
                sendCounter++;
                OnInstanceSent(instance);
            }

            if (instance.SendStatus.Status == DicomState.Failure)
            {
                scu.FailureDescription = instance.SendStatus.Description;
                if (false == String.IsNullOrEmpty(instance.ExtendedFailureDescription))
                {
                    scu.FailureDescription = String.Format("{0} [{1}]", scu.FailureDescription,
                                                           instance.ExtendedFailureDescription);
                }
            }

            if (CancelPending && !(this is WebMoveStudyItemProcessor) && !scu.Canceled)
            {
                Platform.Log(LogLevel.Info, "Auto-route canceled due to shutdown for study: {0}",
                             StorageLocation.StudyInstanceUid);
                item.FailureDescription = "Operation was canceled due to server shutdown request.";
                scu.Cancel();
            }
        };

        try
        {
            // Block until send is complete
            scu.Send();

            // Join for the thread to exit
            scu.Join();
        }
        catch (Exception ex)
        {
            Platform.Log(LogLevel.Error, ex, "Error occurs while sending images to {0} : {1}",
                         device.AeTitle, ex.Message);
        }
        finally
        {
            if (scu.FailureDescription.Length > 0)
            {
                item.FailureDescription = scu.FailureDescription;
                scu.Status = ScuOperationStatus.Failed;
            }

            // Reset the WorkQueue entry status.
            // BUG FIX: a closing brace had been swallowed by the trailing
            // "// failures occurred}" comment, unbalancing the braces here.
            if ((InstanceList.Count > 0 && sendCounter != InstanceList.Count) // not all sop were sent
                || scu.Status == ScuOperationStatus.Failed
                || scu.Status == ScuOperationStatus.ConnectFailed)
            {
                PostProcessingFailure(item, WorkQueueProcessorFailureType.NonFatal); // failures occurred
            }
            else
            {
                OnComplete();
            }
        }
    }
}
/// <summary>
/// Builds a <see cref="WorkQueueDetails"/> snapshot for an edit-study work queue entry,
/// including pending UID/series counts, study/patient info and the requested edit updates.
/// </summary>
private static WorkQueueDetails CreateEditWorkQueueItemDetails(Model.WorkQueue item)
{
    string studyPath;
    try
    {
        StudyStorageLocation location = WorkQueueController.GetLoadStorageLocation(item);
        studyPath = location.GetStudyPath();
    }
    catch (Exception)
    {
        // Storage location could not be resolved; show no path rather than failing.
        studyPath = string.Empty;
    }

    var detail = new WorkQueueDetails();
    detail.Key = item.Key;
    detail.ScheduledDateTime = item.ScheduledTime;
    detail.ExpirationTime = item.ExpirationTime;
    detail.InsertTime = item.InsertTime;
    detail.FailureCount = item.FailureCount;
    detail.Type = item.WorkQueueTypeEnum;
    detail.Status = item.WorkQueueStatusEnum;
    detail.Priority = item.WorkQueuePriorityEnum;
    detail.FailureDescription = item.FailureDescription;
    detail.ServerDescription = item.ProcessorID;
    detail.StorageLocationPath = studyPath;

    // Count pending instances and distinct series from the queued UIDs.
    var uidAdaptor = new WorkQueueUidAdaptor();
    var uidCriteria = new WorkQueueUidSelectCriteria();
    uidCriteria.WorkQueueKey.EqualTo(item.GetKey());
    IList<WorkQueueUid> pendingUids = uidAdaptor.Get(uidCriteria);

    var seriesMap = new Hashtable();
    foreach (WorkQueueUid uid in pendingUids)
    {
        if (!seriesMap.ContainsKey(uid.SeriesInstanceUid))
            seriesMap.Add(uid.SeriesInstanceUid, uid.SopInstanceUid);
    }
    detail.NumInstancesPending = pendingUids.Count;
    detail.NumSeriesPending = seriesMap.Count;

    // Fetch the study and patient info. The Study record may not exist
    // until the images have been processed.
    var storageAdaptor = new StudyStorageAdaptor();
    StudyStorage studyStorage = storageAdaptor.Get(item.StudyStorageKey);

    var studyAdaptor = new StudyAdaptor();
    var studyCriteria = new StudySelectCriteria();
    studyCriteria.StudyInstanceUid.EqualTo(studyStorage.StudyInstanceUid);
    studyCriteria.ServerPartitionKey.EqualTo(item.ServerPartitionKey);
    Study study = studyAdaptor.GetFirst(studyCriteria);
    if (study != null)
    {
        var studyAssembler = new StudyDetailsAssembler();
        detail.Study = studyAssembler.CreateStudyDetail(study);
    }

    // Decode the edit request carried in the item's XML payload.
    var parser = new EditStudyWorkQueueDataParser();
    EditStudyWorkQueueData data = parser.Parse(item.Data.DocumentElement);
    detail.EditUpdateItems = data.EditRequest.UpdateEntries.ToArray();

    return detail;
}
/// <summary>
/// Simple routine for failing a work queue item. Retries indefinitely on database
/// errors to avoid leaving the entry stuck "In Progress"; once the entry's retry
/// budget is exhausted it is marked Failed, otherwise it is reset to Pending with
/// a delayed schedule.
/// </summary>
/// <param name="item">The item to fail.</param>
/// <param name="failureDescription">The reason for the failure.</param>
private void FailQueueItem(Model.WorkQueue item, string failureDescription)
{
    // Must retry to reset the status of the entry in case of db error.
    // Failure to do so will create a stale work queue entry (stuck in "In Progress"
    // state) which can only be recovered by restarting the service.
    while (true)
    {
        try
        {
            WorkQueueTypeProperties prop = _propertiesDictionary[item.WorkQueueTypeEnum];
            using (IUpdateContext updateContext = _store.OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
                IUpdateWorkQueue update = updateContext.GetBroker<IUpdateWorkQueue>();
                UpdateWorkQueueParameters parms = new UpdateWorkQueueParameters
                                                      {
                                                          ProcessorID = ServerPlatform.ProcessorId,
                                                          WorkQueueKey = item.GetKey(),
                                                          StudyStorageKey = item.StudyStorageKey,
                                                          FailureCount = item.FailureCount + 1,
                                                          FailureDescription = failureDescription
                                                      };

                var settings = WorkQueueSettings.Instance;
                if ((item.FailureCount + 1) > prop.MaxFailureCount)
                {
                    // Retry budget exhausted: mark the entry Failed permanently.
                    Platform.Log(LogLevel.Error,
                                 "Failing {0} WorkQueue entry ({1}), reached max retry count of {2}. Failure Reason: {3}",
                                 item.WorkQueueTypeEnum, item.GetKey(), item.FailureCount + 1, failureDescription);
                    parms.WorkQueueStatusEnum = WorkQueueStatusEnum.Failed;
                    parms.ScheduledTime = Platform.Time;
                    parms.ExpirationTime = Platform.Time.AddDays(1);
                    OnWorkQueueEntryFailed(item, failureDescription);
                }
                else
                {
                    // BUG FIX: this log-format string was broken across two source lines
                    // (an unterminated literal); rejoined into a single string.
                    Platform.Log(LogLevel.Error,
                                 "Resetting {0} WorkQueue entry ({1}) to Pending, current retry count {2}. Failure Reason: {3}",
                                 item.WorkQueueTypeEnum, item.GetKey(), item.FailureCount + 1, failureDescription);
                    parms.WorkQueueStatusEnum = WorkQueueStatusEnum.Pending;
                    parms.ScheduledTime = Platform.Time.AddMilliseconds(settings.WorkQueueQueryDelay);
                    parms.ExpirationTime =
                        Platform.Time.AddSeconds((prop.MaxFailureCount - item.FailureCount) * prop.FailureDelaySeconds);
                }

                if (false == update.Execute(parms))
                {
                    Platform.Log(LogLevel.Error, "Unable to update {0} WorkQueue GUID: {1}",
                                 item.WorkQueueTypeEnum, item.GetKey().ToString());
                }
                else
                {
                    updateContext.Commit();
                    break; // done
                }
            }
        }
        catch (Exception ex)
        {
            Platform.Log(LogLevel.Error,
                         "Error occurred when calling FailQueueItem. Retry later. {0}", ex.Message);
            // Back off briefly before retrying; bail out if the service is stopping.
            _terminateEvent.WaitOne(2000, false);
            if (_stop)
            {
                Platform.Log(LogLevel.Warn, "Service is stopping. Retry to fail the entry is terminated.");
                break;
            }
        }
    }
}
/// <summary>
/// Computes a temporary working directory for this work queue item, preferring a
/// "temp" folder on the filesystem where the study is stored; falls back to the
/// base implementation when no storage location or filesystem can be resolved.
/// </summary>
protected override string GetTemporaryPath()
{
    IList<StudyStorageLocation> locations =
        StudyStorageLocation.FindStorageLocations(StudyStorage.Load(_item.StudyStorageKey));
    if (locations == null || locations.Count == 0)
    {
        // ??? (no storage location resolved)
        return base.GetTemporaryPath();
    }

    ServerFilesystemInfo filesystem = FilesystemMonitor.Instance.GetFilesystemInfo(locations[0].FilesystemKey);
    if (filesystem == null)
    {
        // not ready?
        return base.GetTemporaryPath();
    }

    string root = GetTempPathRoot();
    if (String.IsNullOrEmpty(root))
        root = Path.Combine(filesystem.Filesystem.FilesystemPath, "temp");

    // Base name "<type>-<key>"; append "(2)", "(3)", ... until an unused name is found.
    string candidate = Path.Combine(root,
                                    String.Format("{0}-{1}", _item.WorkQueueTypeEnum.Lookup, _item.GetKey()));
    for (int suffix = 2; suffix < 1000 && Directory.Exists(candidate); suffix++)
    {
        candidate = Path.Combine(root,
                                 String.Format("{0}-{1}({2})", _item.WorkQueueTypeEnum.Lookup, _item.GetKey(), suffix));
    }

    if (!Directory.Exists(candidate))
        Directory.CreateDirectory(candidate);

    return candidate;
}
/// <summary>
/// Creates an execution context for processing the given work queue item,
/// named after the item's key GUID.
/// </summary>
/// <param name="item">The work queue item this context is scoped to.</param>
public WorkQueueProcessorContext(Model.WorkQueue item)
    : base(item.GetKey().Key.ToString())
{
    _item = item;
}
/// <summary>
/// Handler invoked when the user confirms resetting a work queue item: recomputes
/// sensible schedule/expiration times and resets the entry, reporting failures
/// through the <see cref="Error"/> event.
/// </summary>
private void PreResetConfirmDialog_Confirmed(object data)
{
    Hide();

    var key = data as ServerEntityKey;
    if (key == null)
        return;

    var adaptor = new WorkQueueAdaptor();
    Model.WorkQueue item = adaptor.Get(key);
    if (item == null)
    {
        // Entry disappeared (e.g. processed/deleted) between selection and confirmation.
        String errorMessage = SR.WorkQueueNotAvailable;
        EventsHelper.Fire(Error, this, new WorkQueueItemResetErrorEventArgs(errorMessage, null));
        return;
    }

    var controller = new WorkQueueController();

    // Push a stale schedule forward, and keep the expiration after the schedule.
    DateTime scheduledTime = item.ScheduledTime;
    if (scheduledTime < Platform.Time)
        scheduledTime = Platform.Time.AddSeconds(WorkQueueSettings.Default.WorkQueueProcessDelaySeconds);

    DateTime expirationTime = item.ExpirationTime;
    if (expirationTime < scheduledTime)
        expirationTime = scheduledTime.AddSeconds(WorkQueueSettings.Default.WorkQueueExpireDelaySeconds);

    try
    {
        var items = new List<Model.WorkQueue> { item };
        controller.ResetWorkQueueItems(items, scheduledTime, expirationTime);

        Platform.Log(LogLevel.Info, "{0} Work Queue item reset: Key={1}.", item.WorkQueueTypeEnum, item.GetKey());
        if (WorkQueueItemReseted != null)
            WorkQueueItemReseted(item);
        if (OnHide != null)
            OnHide();
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e, "Unable to reset {0} work queue item. Key={1}.",
                     item.WorkQueueTypeEnum, item.GetKey());
        String errorMessage = String.Format(SR.WorkQueueResetFailed, e.Message);
        EventsHelper.Fire(Error, this, new WorkQueueItemResetErrorEventArgs(errorMessage, e));
    }
}
/// <summary>
/// Processes a study-compression work queue entry: validates the requested transfer
/// syntax, locates a matching codec, compresses the queued SOP instances and updates
/// the study status when the batch succeeds.
/// </summary>
protected override void ProcessItem(Model.WorkQueue item)
{
    LoadUids(item);

    if (WorkQueueUidList.Count == 0)
    {
        // No UIDs associated with the WorkQueue item. Set the status back to idle
        PostProcessing(item, WorkQueueProcessorStatus.Idle, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
        return;
    }

    // The target transfer syntax is carried in the entry's XML payload.
    XmlElement element = item.Data.DocumentElement;
    string syntax = element.Attributes["syntax"].Value;
    TransferSyntax compressSyntax = TransferSyntax.GetTransferSyntax(syntax);
    if (compressSyntax == null)
    {
        item.FailureDescription =
            String.Format("Invalid transfer syntax in compression WorkQueue item: {0}", element.Attributes["syntax"].Value);
        Platform.Log(LogLevel.Error, "Error with work queue item {0}: {1}", item.GetKey(), item.FailureDescription);
        base.PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
        return;
    }

    if (Study == null)
    {
        item.FailureDescription = String.Format("Compression item does not have a linked Study record");
        Platform.Log(LogLevel.Error, "Error with work queue item {0}: {1}", item.GetKey(), item.FailureDescription);
        base.PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
        return;
    }

    Platform.Log(LogLevel.Info,
                 "Compressing study {0} for Patient {1} (PatientId:{2} A#:{3}) on partition {4} to {5}",
                 Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                 Study.AccessionNumber, ServerPartition.Description, compressSyntax.Name);

    // Find the first registered codec that produces the requested transfer syntax.
    IDicomCodecFactory theCodecFactory = null;
    foreach (IDicomCodecFactory candidate in DicomCodecRegistry.GetCodecFactories())
    {
        if (candidate.CodecTransferSyntax.Equals(compressSyntax))
        {
            theCodecFactory = candidate;
            break;
        }
    }

    if (theCodecFactory == null)
    {
        item.FailureDescription = String.Format("Unable to find codec for compression: {0}", compressSyntax.Name);
        Platform.Log(LogLevel.Error, "Error with work queue item {0}: {1}", item.GetKey(), item.FailureDescription);
        base.PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
        return;
    }

    if (!ProcessUidList(item, theCodecFactory))
    {
        PostProcessingFailure(item, WorkQueueProcessorFailureType.NonFatal);
        return;
    }

    Platform.Log(LogLevel.Info,
                 "Completed Compressing study {0} for Patient {1} (PatientId:{2} A#:{3}) on partition {4} to {5}",
                 Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                 Study.AccessionNumber, ServerPartition.Description, compressSyntax.Name);

    // Lossy vs. lossless compression drives the resulting study status.
    if (compressSyntax.LossyCompressed)
        UpdateStudyStatus(StorageLocation, StudyStatusEnum.OnlineLossy, compressSyntax);
    else
        UpdateStudyStatus(StorageLocation, StudyStatusEnum.OnlineLossless, compressSyntax);

    PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None); // batch processed, not complete
}