private IWorkItemProcessor InsertImportFiles(WorkItemPriorityEnum priority, WorkItemStatusEnum status)
{
    var rq = new WorkItemInsertRequest
    {
        Request = new ImportFilesRequest
        {
            Priority = priority,
            BadFileBehaviour = BadFileBehaviourEnum.Delete,
            FileImportBehaviour = FileImportBehaviourEnum.Save,
            FilePaths = new List<string>(),
        }
    };
    var rsp = WorkItemService.Instance.Insert(rq);

    var updateRequest = new WorkItemUpdateRequest
    {
        Status = status,
        Identifier = rsp.Item.Identifier
    };

    // TODO (CR Jul 2012): Can I actually force an item to "In Progress" this way? Probably shouldn't be able to do that.
    WorkItemService.Instance.Update(updateRequest);

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetWorkItemBroker();
        var d = new ImportItemProcessor();
        d.Initialize(new WorkItemStatusProxy(broker.GetWorkItem(rsp.Item.Identifier)));
        return d;
    }
}
private void ApplyDefaultDeletionRule(RulesEngineOptions context, StudyEntry study)
{
    if (!context.ApplyDeleteActions)
        return;

    // TODO (CR Jun 2012): Again, seem to use "work item" mutex for all database updates. Should just pass in a boolean.
    using (var dac = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = dac.GetStudyBroker();
        var dbStudy = broker.GetStudy(study.Study.StudyInstanceUid);

        var storageConfiguration = StudyStore.GetConfiguration();
        var defaultRule = storageConfiguration.DefaultDeletionRule;
        if (defaultRule.Enabled)
        {
            dbStudy.SetDeleteTime(defaultRule.TimeValue, defaultRule.TimeUnit, TimeOrigin.ReceivedDate, false);
        }
        else
        {
            dbStudy.ClearDeleteTime();
        }

        dac.Commit();
    }
}
protected override int OnStart(StudyLoaderArgs studyLoaderArgs)
{
    _sops = null;

    EventResult result = EventResult.Success;
    var loadedInstances = new AuditedInstances();
    try
    {
        using (var context = new DataAccessContext())
        {
            IStudy study = context.GetStudyBroker().GetStudy(studyLoaderArgs.StudyInstanceUid);
            if (study == null)
            {
                result = EventResult.MajorFailure;
                loadedInstances.AddInstance(studyLoaderArgs.StudyInstanceUid);
                throw new NotFoundLoadStudyException(studyLoaderArgs.StudyInstanceUid);
            }

            loadedInstances.AddInstance(study.PatientId, study.PatientsName, study.StudyInstanceUid);

            _sops = study.GetSopInstances().GetEnumerator();
            return study.NumberOfStudyRelatedInstances;
        }
    }
    finally
    {
        AuditHelper.LogOpenStudies(new[] { AuditHelper.LocalAETitle }, loadedInstances, EventSource.CurrentUser, result);
    }
}
private GetStudyCountResult GetStudyCount(GetStudyCountRequest request)
{
    using (var context = new DataAccessContext())
    {
        var count = context.GetStudyStoreQuery().GetStudyCount(request.Criteria);
        return new GetStudyCountResult { StudyCount = count };
    }
}
public void Rollback()
{
    if (_context != null)
    {
        _context.Dispose();
        _context = null;
    }
}
/// <summary>
/// Initialize the Reapply Rules. Loads the list of studies to scan.
/// </summary>
public void Initialize()
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetStudyBroker();
        StudyOidList = broker.GetStudyOids();
    }

    DatabaseStudiesToScan = StudyOidList.Count;
}
public void Dispose()
{
    if (_disposed)
        throw new InvalidOperationException("Already disposed.");

    _disposed = true;

    if (_context != null)
    {
        _context.Dispose();
        _context = null;
    }
}
private GetImageEntriesResult GetImageEntries(GetImageEntriesRequest request)
{
    using (var context = new DataAccessContext())
    {
        var entries = context.GetStudyStoreQuery().GetImageEntries(request.Criteria);

        var criteria = (request.Criteria ?? new ImageEntry()).Image ?? new ImageIdentifier();
        AuditHelper.LogQueryIssued(null, null, EventSource.CurrentUser, EventResult.Success,
                                   SopClass.StudyRootQueryRetrieveInformationModelFindUid,
                                   criteria.ToDicomAttributeCollection());

        return new GetImageEntriesResult { ImageEntries = entries };
    }
}
public void Initialize(StudyLocation location)
{
    _location = location;

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var studyBroker = context.GetStudyBroker();
        var study = studyBroker.GetStudy(_location.Study.StudyInstanceUid);
        if (study != null)
        {
            _location.Study = study;
            if (study.NumberOfStudyRelatedInstances.HasValue)
                NumberOfStudyRelatedInstances = study.NumberOfStudyRelatedInstances.Value;
        }
    }
}
public bool Process()
{
    // Decided not to use the command processor here, since we're just removing everything
    // and want to be as forgiving as possible.
    try
    {
        DirectoryUtility.DeleteIfExists(_location.StudyFolder);
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e, "Unable to delete study folder: {0}", _location.StudyFolder);
    }

    try
    {
        using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
        {
            var studyBroker = context.GetStudyBroker();
            var study = studyBroker.GetStudy(_location.Study.StudyInstanceUid);
            if (study != null)
            {
                studyBroker.Delete(study);
            }
            context.Commit();
        }

        Platform.Log(LogLevel.Info, "Deleted study for: {0}:{1}", _location.Study.PatientsName, _location.Study.PatientId);
        return true;
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e, "Unexpected exception deleting study-related database entries for study: {0}",
                     _location.Study.StudyInstanceUid);
        return false;
    }
}
/// <summary>
/// Get the next batch of <see cref="WorkItem"/> entries to delete.
/// </summary>
/// <param name="count">The maximum number of entries to return.</param>
/// <returns>
/// A list of <see cref="WorkItem"/> entries to delete, or an empty list if none are found or an error occurs.
/// </returns>
private List<WorkItem> GetWorkItemsToDelete(int count)
{
    try
    {
        // Get WorkItems that have expired and need to be deleted.
        using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
        {
            var workItemBroker = context.GetWorkItemBroker();
            var workItems = workItemBroker.GetWorkItemsToDelete(count);

            foreach (var item in workItems)
            {
                item.Status = WorkItemStatusEnum.DeleteInProgress;
            }

            context.Commit();

            if (workItems.Count > 0)
                return workItems;
        }

        // Get entries already marked as deleted by the GUI.
        using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
        {
            var workItemBroker = context.GetWorkItemBroker();
            var workItems = workItemBroker.GetWorkItemsDeleted(count);

            foreach (var item in workItems)
            {
                item.Status = WorkItemStatusEnum.DeleteInProgress;
            }

            context.Commit();

            return workItems;
        }
    }
    catch (Exception)
    {
        return new List<WorkItem>();
    }
}
private void ProcessStudiesInDatabase()
{
    var rulesEngine = RulesEngine.Create();

    foreach (var oid in StudyOidList)
    {
        try
        {
            // TODO (CR Jun 2012): We don't modify any work items - do we need the mutex?
            using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
            {
                var broker = context.GetStudyBroker();
                var study = broker.GetStudy(oid);

                var studyEntry = study.ToStoreEntry();
                var rulesEngineOptions = new RulesEngineOptions
                {
                    ApplyDeleteActions = _request.ApplyDeleteActions,
                    ApplyRouteActions = _request.ApplyRouteActions
                };

                if (!string.IsNullOrEmpty(_request.RuleId))
                {
                    rulesEngine.ApplyStudyRule(studyEntry, _request.RuleId, rulesEngineOptions);
                }
                else
                {
                    rulesEngine.ApplyStudyRules(studyEntry, rulesEngineOptions);
                }

                EventsHelper.Fire(_studyProcessedEvent, this, new StudyEventArgs { StudyInstanceUid = study.StudyInstanceUid });
            }
        }
        catch (Exception x)
        {
            Platform.Log(LogLevel.Warn, "Unexpected exception attempting to reapply rules for StudyOid {0}: {1}", oid, x.Message);
        }
    }
}
private void Publish(bool saveToDatabase)
{
    if (saveToDatabase)
    {
        try
        {
            using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
            {
                var broker = context.GetWorkItemBroker();
                Item = broker.GetWorkItem(Item.Oid);
                Item.Progress = Progress;
                context.Commit();
            }
        }
        catch (Exception)
        {
            // Saw ChangeConflictException here a few times
        }
    }
    else
    {
        Item.Progress = Progress;
    }

    WorkItemPublishSubscribeHelper.PublishWorkItemChanged(WorkItemsChangedEventType.Update, WorkItemDataHelper.FromWorkItem(Item));
}
private void OnReceiveMoveSeriesRequest(ClearCanvas.Dicom.Network.DicomServer server, byte presentationID,
                                        DicomMessage message, IDicomServiceNode remoteAEInfo)
{
    string studyInstanceUid = message.DataSet[DicomTags.StudyInstanceUid].GetString(0, "");
    var seriesUids = (string[])message.DataSet[DicomTags.SeriesInstanceUid].Values;

    lock (_syncLock)
    {
        int subOperations = 0;
        using (var context = new DataAccessContext())
        {
            var results = context.GetStudyStoreQuery().SeriesQuery(new SeriesIdentifier
            {
                StudyInstanceUid = studyInstanceUid,
            });

            foreach (SeriesIdentifier series in results)
            {
                foreach (string seriesUid in seriesUids)
                {
                    if (series.SeriesInstanceUid.Equals(seriesUid) && series.NumberOfSeriesRelatedInstances.HasValue)
                    {
                        subOperations += series.NumberOfSeriesRelatedInstances.Value;
                        break;
                    }
                }
            }

            var s = context.GetStudyStoreQuery().StudyQuery(new StudyRootStudyIdentifier { StudyInstanceUid = studyInstanceUid });
            var identifier = CollectionUtils.FirstElement(s);

            var client = new DicomSendBridge();
            client.SendSeries(remoteAEInfo, identifier, seriesUids, WorkItemPriorityEnum.High);
            _sendOperations.Add(new SendOperationInfo(client.WorkItem, message.MessageId, presentationID, server)
            {
                SubOperations = subOperations
            });
        }
    }
}
private DateTime? GetScheduledDeleteTime()
{
    using (var context = new DataAccessContext())
    {
        var broker = context.GetWorkItemBroker();
        var items = broker.GetWorkItems(DeleteSeriesRequest.WorkItemTypeString, null, StudyInstanceUid);
        if (items == null)
            return null;

        // Only consider items that have not yet run, or are still in progress. Items that failed,
        // are being deleted, or were canceled aren't valid. We could have actually received the same
        // series again after already deleting it, for example.
        var validItems = items.Where(item => item.Status == WorkItemStatusEnum.Pending
                                             || item.Status == WorkItemStatusEnum.InProgress);

        var deleteItems = validItems
            .Where(item => item.Request is DeleteSeriesRequest)
            .Where(item => ((DeleteSeriesRequest)item.Request).SeriesInstanceUids.Contains(SeriesInstanceUid)).ToList();

        if (!deleteItems.Any())
            return null;

        return deleteItems.Min(item => item.DeleteTime);
    }
}
private void CheckDeleteStudyCanceled(DataAccessContext context, WorkItem workItem)
{
    // Force the study to be visible again if it's a DeleteStudyRequest we're canceling.
    if (workItem.Type.Equals(DeleteStudyRequest.WorkItemTypeString))
    {
        var studyBroker = context.GetStudyBroker();
        var study = studyBroker.GetStudy(workItem.StudyInstanceUid);
        if (study != null)
        {
            study.Deleted = false;
        }
    }
}
public WorkItemUpdateResponse Update(WorkItemUpdateRequest request)
{
    var response = new WorkItemUpdateResponse();
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetWorkItemBroker();
        var workItem = broker.GetWorkItem(request.Identifier);
        if (workItem == null)
        {
            response.Item = null;
            return response;
        }

        bool deleted = false;

        if (request.Delete.HasValue && request.Delete.Value)
        {
            if (workItem.Status != WorkItemStatusEnum.InProgress)
            {
                workItem.Status = WorkItemStatusEnum.Deleted;
                deleted = true;

                // If it's a StudyDelete we're removing, "undelete" the study.
                CheckDeleteStudyCanceled(context, workItem);
            }
        }
        if (!deleted)
        {
            if (request.ExpirationTime.HasValue)
                workItem.ExpirationTime = request.ExpirationTime.Value;
            if (request.Priority.HasValue)
                workItem.Priority = request.Priority.Value;
            if (request.Status.HasValue && workItem.Status != WorkItemStatusEnum.InProgress)
            {
                workItem.Status = request.Status.Value;
                if (request.Status.Value == WorkItemStatusEnum.Canceled)
                    workItem.DeleteTime = Platform.Time.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);
                else if (request.Status.Value == WorkItemStatusEnum.Pending)
                {
                    workItem.ScheduledTime = Platform.Time;
                    workItem.FailureCount = 0;
                }

                // Cache the UserIdentityContext for later use by the shred
                if (workItem.Request.WorkItemType.Equals(ImportFilesRequest.WorkItemTypeString)
                    && request.Status.Value == WorkItemStatusEnum.Pending)
                    UserIdentityCache.Put(workItem.Oid, UserIdentityContext.CreateFromCurrentThreadPrincipal());
            }
            if (request.ProcessTime.HasValue)
                workItem.ProcessTime = request.ProcessTime.Value;

            if (request.Cancel.HasValue && request.Cancel.Value)
            {
                if (workItem.Progress == null || workItem.Progress.IsCancelable)
                {
                    if (workItem.Status.Equals(WorkItemStatusEnum.Idle)
                        || workItem.Status.Equals(WorkItemStatusEnum.Pending))
                    {
                        workItem.Status = WorkItemStatusEnum.Canceled;

                        // If it's a StudyDelete we're removing, "undelete" the study.
                        CheckDeleteStudyCanceled(context, workItem);
                    }
                    else if (workItem.Status.Equals(WorkItemStatusEnum.InProgress))
                    {
                        // Abort the WorkItem
                        WorkItemProcessor.Instance.Cancel(workItem.Oid);
                    }
                }
            }
        }

        context.Commit();

        response.Item = WorkItemDataHelper.FromWorkItem(workItem);
    }

    WorkItemPublishSubscribeHelper.PublishWorkItemChanged(WorkItemsChangedEventType.Update, response.Item);

    return response;
}
/// <summary>
/// Postpone a <see cref="WorkItem"/>.
/// </summary>
public void Postpone(TimeSpan delay)
{
    DateTime now = Platform.Time;
    var workItem = Item.Request as IWorkItemRequestTimeWindow;

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        Item = workItemBroker.GetWorkItem(Item.Oid);

        if (workItem != null && Item.Priority != WorkItemPriorityEnum.Stat)
        {
            DateTime scheduledTime = workItem.GetScheduledTime(now, delay.Seconds);
            Item.ProcessTime = scheduledTime;
            Item.ScheduledTime = scheduledTime;
        }
        else
        {
            Item.ProcessTime = now.Add(delay);
        }

        Item.Progress = Progress;
        if (Item.ProcessTime > Item.ExpirationTime)
            Item.ExpirationTime = Item.ProcessTime;
        Item.Status = WorkItemStatusEnum.Pending;
        context.Commit();

        Publish(false);
        Platform.Log(LogLevel, "Postponing {0} WorkItem for OID {1} until {2}, expires {3}", Item.Type, Item.Oid,
                     Item.ProcessTime.ToLongTimeString(), Item.ExpirationTime.ToLongTimeString());
    }
}
/// <summary>
/// Simple routine for failing a <see cref="WorkItem"/> and rescheduling it at a specified time.
/// </summary>
/// <param name="failureType">The type of failure.</param>
/// <param name="failureTime">The time to reschedule the WorkItem if it isn't a fatal error.</param>
/// <param name="maxRetryCount">The maximum number of times the WorkItem should be retried before a fatal error occurs.</param>
public void Fail(WorkItemFailureType failureType, DateTime failureTime, int maxRetryCount)
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        Item = workItemBroker.GetWorkItem(Item.Oid);

        DateTime now = Platform.Time;

        Item.Progress = Progress;
        Item.FailureCount = Item.FailureCount + 1;
        Item.DeleteTime = now.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);
        if (Item.FailureCount >= maxRetryCount || failureType == WorkItemFailureType.Fatal)
        {
            Item.Status = WorkItemStatusEnum.Failed;
            Item.ExpirationTime = now;
        }
        else
        {
            Item.ProcessTime = failureTime;
            if (Item.ExpirationTime < Item.ProcessTime)
                Item.ExpirationTime = Item.ProcessTime;
            Item.Status = WorkItemStatusEnum.Pending;
        }
        context.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Failing {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid, Item.Request.ActivityDescription);
}
/// <summary>
/// Called on startup to reset InProgress WorkItems back to Pending.
/// </summary>
private void ResetInProgressWorkItems()
{
    bool reindexInProgress = false;

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        var list = workItemBroker.GetWorkItems(null, WorkItemStatusEnum.InProgress, null);

        foreach (var item in list)
        {
            item.Status = WorkItemStatusEnum.Pending;
            if (item.Type.Equals(ReindexRequest.WorkItemTypeString))
                reindexInProgress = true;
        }

        context.Commit();
    }

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        var list = workItemBroker.GetWorkItems(null, WorkItemStatusEnum.DeleteInProgress, null);

        foreach (var item in list)
        {
            item.Status = WorkItemStatusEnum.Deleted;
        }

        context.Commit();
    }

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        var list = workItemBroker.GetWorkItems(null, WorkItemStatusEnum.Canceling, null);

        foreach (var item in list)
        {
            item.Status = WorkItemStatusEnum.Canceled;
            if (item.Type.Equals(ReindexRequest.WorkItemTypeString))
                reindexInProgress = true;
        }

        context.Commit();
    }

    if (reindexInProgress)
    {
        using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
        {
            var studyBroker = context.GetStudyBroker();
            var studyList = studyBroker.GetReindexStudies();

            foreach (var item in studyList)
            {
                item.Reindex = false;
            }

            context.Commit();
        }
    }
}
/// <summary>
/// Complete a <see cref="WorkItem"/>.
/// </summary>
public void Complete()
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetWorkItemBroker();
        Item = broker.GetWorkItem(Item.Oid);

        DateTime now = Platform.Time;

        // Since we're completing, no need for additional status; it's done.
        Progress.StatusDetails = string.Empty;
        Item.Progress = Progress;
        Item.ProcessTime = now;
        Item.ExpirationTime = now;
        Item.DeleteTime = now.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);
        Item.Status = WorkItemStatusEnum.Complete;

        var uidBroker = context.GetWorkItemUidBroker();
        foreach (var entity in Item.WorkItemUids)
        {
            uidBroker.Delete(entity);
        }

        context.Commit();
    }

    Publish(false);

    var studyRequest = Item.Request as WorkItemStudyRequest;
    if (studyRequest != null)
        Platform.Log(LogLevel.Info, "Completing {0} WorkItem for OID {1}: {2}, {3}:{4}", Item.Type, Item.Oid,
                     Item.Request.ActivityDescription, studyRequest.Patient.PatientsName, studyRequest.Patient.PatientId);
    else
        Platform.Log(LogLevel.Info, "Completing {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid,
                     Item.Request.ActivityDescription);
}
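// Illustrative sketch (not part of the original source): one way a work item processor might drive the
// status-proxy methods above (Postpone, Fail, Complete), assuming they live on WorkItemStatusProxy as
// suggested by InsertImportFiles above. RunOnce, the resourcesAvailable/doWork delegates, the delay and
// retry values, and WorkItemFailureType.NonFatal are assumptions for illustration only.
private void RunOnce(WorkItemStatusProxy proxy, Func<bool> resourcesAvailable, Action<WorkItem> doWork)
{
    try
    {
        if (!resourcesAvailable())
        {
            // Reschedule and put the item back to Pending.
            proxy.Postpone(TimeSpan.FromSeconds(30));
            return;
        }

        // Do the actual processing for this work item.
        doWork(proxy.Item);

        // Marks the item Complete, clears status details, and schedules the row for deletion.
        proxy.Complete();
    }
    catch (Exception)
    {
        // Retry in five minutes; after three attempts (or a fatal failure) the item is marked Failed.
        proxy.Fail(WorkItemFailureType.NonFatal, Platform.Time.AddMinutes(5), 3);
    }
}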
private WorkItemQuery()
{
    _context = new DataAccessContext(DataAccessContext.WorkItemMutex);
}
public WorkItemQueryResponse Query(WorkItemQueryRequest request)
{
    var response = new WorkItemQueryResponse();
    using (var context = new DataAccessContext())
    {
        var broker = context.GetWorkItemBroker();

        var dbList = broker.GetWorkItems(request.Type, request.Status, request.StudyInstanceUid, request.Identifier);

        var results = new List<WorkItemData>();

        foreach (var dbItem in dbList)
        {
            results.Add(WorkItemDataHelper.FromWorkItem(dbItem));
        }

        response.Items = results.ToArray();
    }
    return response;
}
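// Illustrative sketch (not part of the original source): querying work items from the client side,
// assuming Query is exposed on WorkItemService.Instance the same way Insert and Update are (see
// InsertImportFiles above). FindPendingWorkItems is a hypothetical helper; the request fields mirror
// the ones read by the Query implementation above.
private WorkItemData[] FindPendingWorkItems(string studyInstanceUid)
{
    var request = new WorkItemQueryRequest
    {
        Status = WorkItemStatusEnum.Pending,    // only items that have not started yet
        StudyInstanceUid = studyInstanceUid     // limit to one study; Type/Identifier left unset
    };

    var response = WorkItemService.Instance.Query(request);
    return response.Items;
}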
/// <summary>
/// Make a <see cref="WorkItem"/> Idle.
/// </summary>
public void Idle()
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetWorkItemBroker();
        Item = broker.GetWorkItem(Item.Oid);

        DateTime now = Platform.Time;

        Item.Progress = Progress;
        Item.ProcessTime = now.AddSeconds(WorkItemServiceSettings.Default.PostponeSeconds);
        if (Item.ProcessTime > Item.ExpirationTime)
            Item.ProcessTime = Item.ExpirationTime;
        Item.Status = WorkItemStatusEnum.Idle;
        context.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Idling {0} WorkItem for OID {1} until {2}, expires {3}", Item.Type, Item.Oid,
                 Item.ProcessTime.ToLongTimeString(), Item.ExpirationTime.ToLongTimeString());
}
public WorkItemInsertResponse Insert(WorkItemInsertRequest request)
{
    // TODO (CR Jun 2012): The fact that there is special processing in here for particular types of work items
    // indicates there is something wrong with the design that may make adding custom work item types difficult.
    // Maybe the different "processors" need to perform the insert, or at least have some kind of method (rule)
    // for processing the insert?
    var response = new WorkItemInsertResponse();

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        DateTime now = Platform.Time;
        var broker = context.GetWorkItemBroker();

        if (request.Request.WorkItemType.Equals(ReindexRequest.WorkItemTypeString))
        {
            var list = broker.GetWorkItems(request.Request.WorkItemType, null, null);
            foreach (var workItem in list)
            {
                if (workItem.Status == WorkItemStatusEnum.Pending
                    || workItem.Status == WorkItemStatusEnum.InProgress)
                {
                    response.Item = WorkItemDataHelper.FromWorkItem(workItem);
                    return response;
                }
            }
        }

        var deleteStudyRequest = request.Request as DeleteStudyRequest;
        if (deleteStudyRequest != null)
        {
            var list = broker.GetWorkItems(request.Request.WorkItemType, null, deleteStudyRequest.Study.StudyInstanceUid);
            foreach (var workItem in list)
            {
                if (workItem.Status == WorkItemStatusEnum.Pending
                    || workItem.Status == WorkItemStatusEnum.InProgress)
                {
                    // Mark studies to delete as "deleted" in the database.
                    var studyBroker = context.GetStudyBroker();
                    var study = studyBroker.GetStudy(deleteStudyRequest.Study.StudyInstanceUid);
                    if (study != null)
                    {
                        study.Deleted = true;
                        context.Commit();
                    }

                    response.Item = WorkItemDataHelper.FromWorkItem(workItem);
                    return response;
                }
            }
        }

        var item = new WorkItem
        {
            Request = request.Request,
            Progress = request.Progress,
            Type = request.Request.WorkItemType,
            Priority = request.Request.Priority,
            ScheduledTime = now.AddSeconds(WorkItemServiceSettings.Default.InsertDelaySeconds),
            ProcessTime = now.AddSeconds(WorkItemServiceSettings.Default.InsertDelaySeconds),
            DeleteTime = now.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes),
            ExpirationTime = now.AddSeconds(WorkItemServiceSettings.Default.ExpireDelaySeconds),
            RequestedTime = now,
            Status = WorkItemStatusEnum.Pending
        };

        var studyRequest = request.Request as WorkItemStudyRequest;
        if (studyRequest != null)
        {
            item.StudyInstanceUid = studyRequest.Study.StudyInstanceUid;

            if (request.Request.WorkItemType.Equals(DeleteStudyRequest.WorkItemTypeString))
            {
                // Mark studies to delete as "deleted" in the database.
                var studyBroker = context.GetStudyBroker();
                var study = studyBroker.GetStudy(studyRequest.Study.StudyInstanceUid);
                if (study != null)
                    study.Deleted = true;
            }
        }

        broker.AddWorkItem(item);

        context.Commit();

        response.Item = WorkItemDataHelper.FromWorkItem(item);
    }

    // Cache the UserIdentityContext for later use by the shred
    if (request.Request.WorkItemType.Equals(ImportFilesRequest.WorkItemTypeString))
        UserIdentityCache.Put(response.Item.Identifier, UserIdentityContext.CreateFromCurrentThreadPrincipal());

    WorkItemPublishSubscribeHelper.PublishWorkItemChanged(WorkItemsChangedEventType.Update, response.Item);

    if (WorkItemProcessor.Instance != null)
        WorkItemProcessor.Instance.SignalThread();

    return response;
}
/// <summary>
/// Mark a <see cref="WorkItem"/> as being in the process of canceling.
/// </summary>
public void Canceling()
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        Item = workItemBroker.GetWorkItem(Item.Oid);
        Item.Progress = Progress;
        Item.Status = WorkItemStatusEnum.Canceling;
        context.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Canceling {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid, Item.Request.ActivityDescription);
}
private void OnReceiveMoveStudiesRequest(ClearCanvas.Dicom.Network.DicomServer server, byte presentationID,
                                         DicomMessage message, IDicomServiceNode remoteAEInfo)
{
    IEnumerable<string> studyUids = (string[])message.DataSet[DicomTags.StudyInstanceUid].Values;

    foreach (string studyUid in studyUids)
    {
        lock (_syncLock)
        {
            int subOperations = 0;
            using (var context = new DataAccessContext())
            {
                var s = context.GetStudyStoreQuery().StudyQuery(new StudyRootStudyIdentifier { StudyInstanceUid = studyUid });
                var identifier = CollectionUtils.FirstElement(s);
                if (identifier.NumberOfStudyRelatedInstances.HasValue)
                    subOperations = identifier.NumberOfStudyRelatedInstances.Value;

                var client = new DicomSendBridge();
                client.SendStudy(remoteAEInfo, identifier, WorkItemPriorityEnum.High);
                _sendOperations.Add(new SendOperationInfo(client.WorkItem, message.MessageId, presentationID, server)
                {
                    SubOperations = subOperations
                });
            }
        }
    }
}
/// <summary>
/// Cancel a <see cref="WorkItem"/>.
/// </summary>
public void Cancel()
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetWorkItemBroker();
        Item = broker.GetWorkItem(Item.Oid);

        DateTime now = Platform.Time;

        Item.ProcessTime = now;
        Item.ExpirationTime = now;
        Item.DeleteTime = now.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);
        Item.Status = WorkItemStatusEnum.Canceled;
        Item.Progress = Progress;
        context.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Canceling {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid, Item.Request.ActivityDescription);
}
private void OnReceiveMoveImageRequest(ClearCanvas.Dicom.Network.DicomServer server, byte presentationID,
                                       DicomMessage message, IDicomServiceNode remoteAEInfo)
{
    string studyInstanceUid = message.DataSet[DicomTags.StudyInstanceUid].GetString(0, string.Empty);
    string seriesInstanceUid = message.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty);
    var sopInstanceUids = (string[])message.DataSet[DicomTags.SopInstanceUid].Values;

    lock (_syncLock)
    {
        using (var context = new DataAccessContext())
        {
            var s = context.GetStudyStoreQuery().StudyQuery(new StudyRootStudyIdentifier { StudyInstanceUid = studyInstanceUid });
            var identifier = CollectionUtils.FirstElement(s);

            var client = new DicomSendBridge();
            client.SendSops(remoteAEInfo, identifier, seriesInstanceUid, sopInstanceUids, WorkItemPriorityEnum.High);
            _sendOperations.Add(new SendOperationInfo(client.WorkItem, message.MessageId, presentationID, server)
            {
                SubOperations = sopInstanceUids.Length
            });
        }
    }
}
/// <summary>
/// Delete a <see cref="WorkItem"/>.
/// </summary>
public void Delete()
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetWorkItemBroker();
        Item = broker.GetWorkItem(Item.Oid);
        Item.Status = WorkItemStatusEnum.Deleted;
        broker.Delete(Item);
        context.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Deleting {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid, Item.Request.ActivityDescription);
}