protected virtual void OnInstanceSent(StorageInstance instance)
{
    List<WorkQueueUid> foundUids = FindWorkQueueUids(instance);

    if (instance.SendStatus.Equals(DicomStatuses.SOPClassNotSupported))
    {
        WorkQueueItem.FailureDescription =
            String.Format("SOP Class not supported by remote device: {0}", instance.SopClass.Name);
        Platform.Log(LogLevel.Warn,
                     "Unable to transfer SOP Instance, SOP Class is not supported by remote device: {0}",
                     instance.SopClass.Name);
    }

    if (instance.SendStatus.Status == DicomState.Failure)
    {
        // Record the failure and bump the per-UID failure count so the item can be retried later.
        WorkQueueItem.FailureDescription = instance.SendStatus.Description;
        if (foundUids != null)
        {
            foreach (WorkQueueUid uid in foundUids)
            {
                uid.FailureCount++;
                UpdateWorkQueueUid(uid);
            }
        }
    }
    else if (foundUids != null)
    {
        // Success: the UIDs no longer need to be tracked.
        foreach (WorkQueueUid uid in foundUids)
        {
            DeleteWorkQueueUid(uid);
            WorkQueueUidList.Remove(uid);
        }
    }
}
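// A minimal, self-contained sketch of the bookkeeping pattern used by OnInstanceSent above,
// using hypothetical stand-in types rather than the actual ClearCanvas WorkQueue classes
// (PendingUid and RecordSendResult below are invented for illustration only): on failure the
// tracking record's failure count is incremented so a retry policy can act on it; on success
// the record is removed from the pending list.
using System.Collections.Generic;
using System.Linq;

internal sealed class PendingUid
{
    public string SopInstanceUid;
    public int FailureCount;
}

internal static class SendBookkeepingSketch
{
    // Mirrors OnInstanceSent: update tracking records after one send attempt.
    public static void RecordSendResult(List<PendingUid> pending, string sopInstanceUid, bool failed)
    {
        List<PendingUid> found = pending.Where(u => u.SopInstanceUid == sopInstanceUid).ToList();

        if (failed)
        {
            // Leave the records in place, but count the failure for later retries.
            foreach (PendingUid uid in found)
                uid.FailureCount++;
        }
        else
        {
            // Success: stop tracking these records entirely.
            foreach (PendingUid uid in found)
                pending.Remove(uid);
        }
    }
}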
/// <summary>
/// Process all of the SOP Instances associated with a <see cref="WorkItem"/> item.
/// </summary>
/// <returns>true if any instances were processed successfully, or if files are still being added; otherwise false.</returns>
private bool ProcessUidList()
{
    StudyXml studyXml = Location.LoadStudyXml();

    int successfulProcessCount = 0;
    int lastSuccessProcessCount = -1;
    bool filesStillBeingAdded = false;

    // Loop, re-querying the database each pass, until no further progress is made.
    while (successfulProcessCount > lastSuccessProcessCount)
    {
        // If we're just doing a few at a time, less than the batch size, postpone for now.
        if (lastSuccessProcessCount != -1
            && (successfulProcessCount - lastSuccessProcessCount) < WorkItemServiceSettings.Default.StudyProcessBatchSize)
        {
            break;
        }

        lastSuccessProcessCount = successfulProcessCount;

        LoadUids();

        // Keep idling as long as there's new stuff being added to process, regardless of success.
        if (Progress.TotalFilesToProcess != WorkQueueUidList.Count)
        {
            filesStillBeingAdded = true;
        }

        Progress.TotalFilesToProcess = WorkQueueUidList.Count;
        Proxy.UpdateProgress(true);

        int maxBatch = WorkItemServiceSettings.Default.StudyProcessBatchSize;
        var fileList = new List<WorkItemUid>(maxBatch);

        foreach (WorkItemUid sop in WorkQueueUidList)
        {
            if (sop.Failed)
                continue;
            if (sop.Complete)
                continue;

            if (CancelPending)
            {
                Platform.Log(LogLevel.Info, "Processing of study canceled: {0}", Location.Study.StudyInstanceUid);
                return successfulProcessCount > 0;
            }

            if (StopPending)
            {
                Platform.Log(LogLevel.Info, "Processing of study stopped: {0}", Location.Study.StudyInstanceUid);
                return successfulProcessCount > 0;
            }

            if (sop.FailureCount > 0)
            {
                // Failed SOPs are processed individually; all others are batched.
                if (fileList.Count > 0)
                {
                    if (ProcessWorkQueueUids(fileList, studyXml))
                        successfulProcessCount++;
                    fileList = new List<WorkItemUid>();
                }

                fileList.Add(sop);
                if (ProcessWorkQueueUids(fileList, studyXml))
                    successfulProcessCount++;
                fileList = new List<WorkItemUid>();
            }
            else
            {
                fileList.Add(sop);
                if (fileList.Count >= maxBatch)
                {
                    // TODO (CR Jun 2012 - Med): This method indicates there is a relation between "process count" and
                    // the number of SOPs processed, but successfulProcessCount is only incremented by 1 for all the
                    // SOPs processed here. Will this unnecessarily slow processing down?
                    // Maybe ProcessWorkQueueUids should return the number processed successfully?
                    // (SW) - The inner loop through WorkQueueUidList processes all the files that were available at
                    // the start of the WorkItem's processing. I don't think this is a significant issue, but it is
                    // ugly code. We could just increment successfulProcessCount by fileList.Count to make it consistent.
                    if (ProcessWorkQueueUids(fileList, studyXml))
                        successfulProcessCount++;
                    fileList = new List<WorkItemUid>();
                }
            }
        }

        // Flush any remaining partial batch.
        if (fileList.Count > 0)
        {
            if (ProcessWorkQueueUids(fileList, studyXml))
                successfulProcessCount++;
        }
    }

    int failureItems = WorkQueueUidList.Count(s => s.Failed);
    if (failureItems != Progress.NumberOfProcessingFailures)
    {
        Progress.NumberOfProcessingFailures = failureItems;
        Proxy.UpdateProgress(true);
        return true;
    }

    return successfulProcessCount > 0 || filesStillBeingAdded;
}
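// A minimal, runnable sketch of the batching strategy used by ProcessUidList, again with
// hypothetical stand-in types instead of the actual WorkItemUid/StudyXml classes (QueuedItem,
// BatchingSketch, and processBatch below are invented for illustration): items that have
// failed before are processed in a batch of one, so a single bad file cannot poison a whole
// batch; everything else is accumulated up to the batch size; and any partial batch left over
// at the end is flushed.
using System;
using System.Collections.Generic;

internal sealed class QueuedItem
{
    public string Uid;
    public int FailureCount;
}

internal static class BatchingSketch
{
    public static int ProcessInBatches(IEnumerable<QueuedItem> items, int maxBatch,
                                       Func<List<QueuedItem>, bool> processBatch)
    {
        int successfulBatches = 0;
        var batch = new List<QueuedItem>(maxBatch);

        foreach (QueuedItem item in items)
        {
            if (item.FailureCount > 0)
            {
                // Flush whatever is pending, then run the previously-failed item on its own.
                if (batch.Count > 0)
                {
                    if (processBatch(batch)) successfulBatches++;
                    batch = new List<QueuedItem>(maxBatch);
                }
                if (processBatch(new List<QueuedItem> { item })) successfulBatches++;
            }
            else
            {
                batch.Add(item);
                if (batch.Count >= maxBatch)
                {
                    if (processBatch(batch)) successfulBatches++;
                    batch = new List<QueuedItem>(maxBatch);
                }
            }
        }

        // Flush the final partial batch.
        if (batch.Count > 0 && processBatch(batch)) successfulBatches++;

        return successfulBatches;
    }
}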