/// <summary>
/// Applies the configured default deletion rule to a study, setting or clearing its
/// scheduled delete time in the database.
/// </summary>
/// <param name="context">Rules engine options; nothing is done unless <c>ApplyDeleteActions</c> is set.</param>
/// <param name="study">The study entry whose database record is updated.</param>
private void ApplyDefaultDeletionRule(RulesEngineOptions context, StudyEntry study)
{
    if (!context.ApplyDeleteActions)
        return;

    // TODO (CR Jun 2012): Again, seem to use "work item" mutex for all database updates. Should just pass in a boolean.
    using (var dac = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = dac.GetStudyBroker();
        var dbStudy = broker.GetStudy(study.Study.StudyInstanceUid);

        // Guard against the study having been removed from the database since the caller
        // looked it up (sibling code treats GetStudy as nullable); previously this would NRE.
        if (dbStudy == null)
            return;

        var storageConfiguration = StudyStore.GetConfiguration();
        var defaultRule = storageConfiguration.DefaultDeletionRule;
        if (defaultRule.Enabled)
        {
            dbStudy.SetDeleteTime(defaultRule.TimeValue, defaultRule.TimeUnit, TimeOrigin.ReceivedDate, false);
        }
        else
        {
            dbStudy.ClearDeleteTime();
        }

        dac.Commit();
    }
}
/// <summary>
/// Deletes the study folder from disk (best effort) and then removes the study's
/// database entry.
/// </summary>
/// <returns>true if the database entries were removed; false on unexpected failure.</returns>
public bool Process()
{
    // Decided not to use the command processor here, since we're just removing everything
    // and want to be as forgiving as possible.
    try
    {
        DirectoryUtility.DeleteIfExists(_location.StudyFolder);
    }
    catch (Exception e)
    {
        // A failed folder delete is logged but does not abort the database cleanup.
        Platform.Log(LogLevel.Error, e, "Unable to delete study folder: {0}", _location.StudyFolder);
    }

    try
    {
        using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
        {
            var studyBroker = context.GetStudyBroker();
            var study = studyBroker.GetStudy(_location.Study.StudyInstanceUid);
            if (study != null)
            {
                studyBroker.Delete(study);
            }
            context.Commit();
        }

        Platform.Log(LogLevel.Info, "Deleted study for: {0}:{1}", _location.Study.PatientsName,
                     _location.Study.PatientId);
        return true;
    }
    catch (Exception e)
    {
        // BUG FIX: the message previously read "when {0} deleting ... study: {0}" — a stray
        // duplicated placeholder that reused the single argument and garbled the text.
        Platform.Log(LogLevel.Error, e, "Unexpected exception when deleting Study related database entries for study: {0}",
                     _location.Study.StudyInstanceUid);
        return false;
    }
}
/// <summary>
/// Cancel a <see cref="WorkItem"/>
/// </summary>
public void Cancel()
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        // Re-fetch so the latest persisted copy is the one we mutate.
        Item = dataContext.GetWorkItemBroker().GetWorkItem(Item.Oid);

        var timestamp = Platform.Time;
        Item.ProcessTime = timestamp;
        Item.ExpirationTime = timestamp;
        Item.DeleteTime = timestamp.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);
        Item.Status = WorkItemStatusEnum.Canceled;
        Item.Progress = Progress;

        dataContext.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Canceling {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid,
                 Item.Request.ActivityDescription);
}
/// <summary>
/// Mark <see cref="WorkItem"/> as being in the process of canceling
/// </summary>
public void Canceling()
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        // Re-fetch so the latest persisted copy is the one we mutate.
        Item = dataContext.GetWorkItemBroker().GetWorkItem(Item.Oid);
        Item.Progress = Progress;
        Item.Status = WorkItemStatusEnum.Canceling;
        dataContext.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Canceling {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid,
                 Item.Request.ActivityDescription);
}
/// <summary>
/// Make a <see cref="WorkItem"/> Idle.
/// </summary>
public void Idle()
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = dataContext.GetWorkItemBroker();
        Item = workItemBroker.GetWorkItem(Item.Oid);

        Item.Progress = Progress;

        // Postpone processing by the configured interval, but never beyond the expiration time.
        var postponedTime = Platform.Time.AddSeconds(WorkItemServiceSettings.Default.PostponeSeconds);
        Item.ProcessTime = postponedTime > Item.ExpirationTime ? Item.ExpirationTime : postponedTime;
        Item.Status = WorkItemStatusEnum.Idle;

        dataContext.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Idling {0} WorkItem for OID {1} until {2}, expires {3}", Item.Type, Item.Oid,
                 Item.ProcessTime.ToLongTimeString(), Item.ExpirationTime.ToLongTimeString());
}
/// <summary>
/// Complete a <see cref="WorkItem"/>.
/// </summary>
public void Complete()
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = dataContext.GetWorkItemBroker();
        Item = workItemBroker.GetWorkItem(Item.Oid);

        var timestamp = Platform.Time;

        // Since we're completing, no need for additional status, it's done.
        Progress.StatusDetails = string.Empty;

        Item.Progress = Progress;
        Item.ProcessTime = timestamp;
        Item.ExpirationTime = timestamp;
        Item.DeleteTime = timestamp.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);
        Item.Status = WorkItemStatusEnum.Complete;

        // Remove the WorkItem's per-UID entries now that the item is finished.
        var uidBroker = dataContext.GetWorkItemUidBroker();
        foreach (var uidEntry in Item.WorkItemUids)
            uidBroker.Delete(uidEntry);

        dataContext.Commit();
    }

    Publish(false);

    var studyRequest = Item.Request as WorkItemStudyRequest;
    if (studyRequest == null)
        Platform.Log(LogLevel.Info, "Completing {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid,
                     Item.Request.ActivityDescription);
    else
        Platform.Log(LogLevel.Info, "Completing {0} WorkItem for OID {1}: {2}, {3}:{4}", Item.Type, Item.Oid,
                     Item.Request.ActivityDescription, studyRequest.Patient.PatientsName,
                     studyRequest.Patient.PatientId);
}
/// <summary>
/// Removes every WorkItem row from the database in a single transaction.
/// </summary>
private void DeleteAllWorkItems()
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = dataContext.GetWorkItemBroker();
        foreach (var workItem in workItemBroker.GetWorkItems(null, null, null))
            workItemBroker.Delete(workItem);
        dataContext.Commit();
    }
}
/// <summary>
/// Service entry point that applies an update request (delete, cancel, reschedule,
/// priority/status change) to an existing <see cref="WorkItem"/> and publishes the change.
/// </summary>
/// <param name="request">The update request; <c>Identifier</c> selects the WorkItem and the
/// remaining nullable fields indicate which updates to apply.</param>
/// <returns>A response whose <c>Item</c> is the updated WorkItem data, or null if the
/// identifier did not match any WorkItem.</returns>
public WorkItemUpdateResponse Update(WorkItemUpdateRequest request)
{
    var response = new WorkItemUpdateResponse();
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var broker = context.GetWorkItemBroker();
        var workItem = broker.GetWorkItem(request.Identifier);
        if (workItem == null)
        {
            // Unknown identifier: respond with a null Item rather than throwing.
            response.Item = null;
            return response;
        }

        bool deleted = false;

        // Delete takes precedence; an InProgress item cannot be deleted directly.
        if (request.Delete.HasValue && request.Delete.Value)
        {
            if (workItem.Status != WorkItemStatusEnum.InProgress)
            {
                workItem.Status = WorkItemStatusEnum.Deleted;
                deleted = true;

                // If StudyDelete we're removing, "undelete" the study
                CheckDeleteStudyCanceled(context, workItem);
            }
        }
        if (!deleted)
        {
            if (request.ExpirationTime.HasValue)
                workItem.ExpirationTime = request.ExpirationTime.Value;
            if (request.Priority.HasValue)
                workItem.Priority = request.Priority.Value;

            // Status changes are only honored while the item is not actively processing.
            if (request.Status.HasValue && workItem.Status != WorkItemStatusEnum.InProgress)
            {
                workItem.Status = request.Status.Value;
                if (request.Status.Value == WorkItemStatusEnum.Canceled)
                    workItem.DeleteTime = Platform.Time.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);
                else if (request.Status.Value == WorkItemStatusEnum.Pending)
                {
                    // Re-activation: reschedule now and clear prior failures.
                    workItem.ScheduledTime = Platform.Time;
                    workItem.FailureCount = 0;
                }
                // Cache the UserIdentityContext for later use by the shred
                if (workItem.Request.WorkItemType.Equals(ImportFilesRequest.WorkItemTypeString) && request.Status.Value == WorkItemStatusEnum.Pending)
                    UserIdentityCache.Put(workItem.Oid, UserIdentityContext.CreateFromCurrentThreadPrincipal());
            }

            if (request.ProcessTime.HasValue)
                workItem.ProcessTime = request.ProcessTime.Value;

            if (request.Cancel.HasValue && request.Cancel.Value)
            {
                // A null Progress is treated as cancelable — TODO confirm this is intentional.
                if (workItem.Progress == null || workItem.Progress.IsCancelable)
                {
                    if (workItem.Status.Equals(WorkItemStatusEnum.Idle)
                        || workItem.Status.Equals(WorkItemStatusEnum.Pending))
                    {
                        // Not yet running: cancel directly in the database.
                        workItem.Status = WorkItemStatusEnum.Canceled;

                        // If StudyDelete we're removing, "undelete" the study
                        CheckDeleteStudyCanceled(context, workItem);
                    }
                    else if (workItem.Status.Equals(WorkItemStatusEnum.InProgress))
                    {
                        // Abort the WorkItem
                        WorkItemProcessor.Instance.Cancel(workItem.Oid);
                    }
                }
            }
        }

        context.Commit();

        response.Item = WorkItemDataHelper.FromWorkItem(workItem);
    }

    WorkItemPublishSubscribeHelper.PublishWorkItemChanged(WorkItemsChangedEventType.Update, response.Item);

    return response;
}
/// <summary>
/// Gets the next batch of <see cref="WorkItem"/> entries to delete, marking each returned
/// entry as <see cref="WorkItemStatusEnum.DeleteInProgress"/> so it is not picked up twice.
/// </summary>
/// <param name="count">The maximum number of entries to return.</param>
/// <returns>
/// Expired WorkItems due for deletion if any exist; otherwise entries already marked as
/// deleted by the GUI. Returns an empty list when nothing is eligible or an error occurs.
/// </returns>
private List<WorkItem> GetWorkItemsToDelete(int count)
{
    try
    {
        // Get WorkItems that have expired that need to be deleted
        using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
        {
            var workItemBroker = context.GetWorkItemBroker();
            var workItems = workItemBroker.GetWorkItemsToDelete(count);

            foreach (var item in workItems)
            {
                item.Status = WorkItemStatusEnum.DeleteInProgress;
            }

            context.Commit();

            if (workItems.Count > 0)
                return workItems;
        }

        // Get entries already marked as deleted by the GUI.
        using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
        {
            var workItemBroker = context.GetWorkItemBroker();
            var workItems = workItemBroker.GetWorkItemsDeleted(count);

            foreach (var item in workItems)
            {
                item.Status = WorkItemStatusEnum.DeleteInProgress;
            }

            context.Commit();

            return workItems;
        }
    }
    catch (Exception e)
    {
        // Previously a silent swallow; log so database failures during cleanup are visible,
        // while still returning an empty list to keep the deletion loop best-effort.
        Platform.Log(LogLevel.Error, e, "Unexpected exception querying for WorkItems to delete.");
        return new List<WorkItem>();
    }
}
/// <summary>
/// Increments the TotalFilesToProcess counter on the study's ProcessStudy WorkItem progress
/// when a subsequent file for an already-known study is imported, then refreshes the cached
/// WorkItem for the study.
/// </summary>
/// <param name="insertWorkItemCommand">Command whose WorkItem is re-read from the database and updated.</param>
/// <param name="studyInstanceUid">The study the imported file belongs to.</param>
/// <param name="errorMessage">Optional error detail recorded into the progress StatusDetails.</param>
private void IncrementTotalFiles(InsertWorkItemCommand insertWorkItemCommand, string studyInstanceUid, string errorMessage = null)
{
    // Check membership under the lock, but do the database work outside it.
    bool foundStudy;
    lock (_context.StudyWorkItemsSyncLock)
        foundStudy = _context.StudyWorkItems.ContainsKey(studyInstanceUid);

    if (foundStudy)
    {
        // First image imported already has the TotalFilesToProcess pre-set to 1, so only update after the first
        var progress = insertWorkItemCommand.WorkItem.Progress as ProcessStudyProgress;
        if (progress != null)
        {
            using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
            {
                var broker = context.GetWorkItemBroker();

                // Re-read the WorkItem inside the transaction and re-check the progress type,
                // since the persisted copy may have changed since the command was built.
                insertWorkItemCommand.WorkItem = broker.GetWorkItem(insertWorkItemCommand.WorkItem.Oid);
                progress = insertWorkItemCommand.WorkItem.Progress as ProcessStudyProgress;
                if (progress != null)
                {
                    progress.TotalFilesToProcess++;
                    if (!string.IsNullOrEmpty(errorMessage))
                        progress.StatusDetails = errorMessage;
                    insertWorkItemCommand.WorkItem.Progress = progress;
                }

                context.Commit();
            }
        }
    }

    // Save the updated WorkItem, note that this also publishes the workitem automatically
    lock (_context.StudyWorkItemsSyncLock)
        _context.StudyWorkItems[studyInstanceUid] = insertWorkItemCommand.WorkItem;
}
/// <summary>
/// Resets any idle study process work items associated with the files imported in the current import context
/// </summary>
public void PulseStudyWorkItems()
{
    try
    {
        lock (_context.StudyWorkItemsSyncLock)
        {
            using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
            {
                var workItemBroker = dataContext.GetWorkItemBroker();
                var pulseTime = Platform.Time;

                foreach (var cachedItem in _context.StudyWorkItems.Values)
                {
                    // Re-read each cached item; only idle items are pulsed.
                    var workItem = workItemBroker.GetWorkItem(cachedItem.Oid);
                    if (workItem == null || workItem.Status != WorkItemStatusEnum.Idle)
                        continue;

                    workItem.ProcessTime = pulseTime;
                    workItem.ExpirationTime = pulseTime;
                }

                dataContext.Commit();
            }
        }
    }
    catch (Exception ex)
    {
        Platform.Log(LogLevel.Debug, ex, "Failed to pulse study work items");
    }
}
/// <summary>
/// Walks the studies recorded in <c>StudyOidList</c>: fires the processed event for studies
/// whose folder exists on disk, and deletes (then fires the deleted event for) orphaned
/// database rows whose folder is gone. Stops early when cancellation has been requested.
/// </summary>
private void ProcessStudiesInDatabase()
{
    foreach (long studyOid in StudyOidList)
    {
        try
        {
            using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
            {
                var studyBroker = dataContext.GetStudyBroker();
                var study = studyBroker.GetStudy(studyOid);
                var location = new StudyLocation(study.StudyInstanceUid);

                if (Directory.Exists(location.StudyFolder))
                {
                    EventsHelper.Fire(_studyProcessedEvent, this,
                                      new StudyEventArgs { StudyInstanceUid = study.StudyInstanceUid });
                }
                else
                {
                    // The row exists in the database but the folder is gone: purge the orphan.
                    studyBroker.Delete(study);
                    dataContext.Commit();

                    EventsHelper.Fire(_studyDeletedEvent, this,
                                      new StudyEventArgs { StudyInstanceUid = study.StudyInstanceUid });
                    Platform.Log(LogLevel.Info, "Deleted Study that wasn't on disk, but in the database: {0}",
                                 study.StudyInstanceUid);
                }
            }
        }
        catch (Exception x)
        {
            Platform.Log(LogLevel.Warn, "Unexpected exception attempting to reindex StudyOid {0}: {1}", studyOid,
                         x.Message);
        }

        if (_cancelRequested)
            return;
    }
}
/// <summary>
/// Clears the Reindex flag on all studies that have it set and fires the studies-restored
/// event with the UIDs of the studies that were reset.
/// </summary>
private void ResetReindexStudies()
{
    var resetStudyUids = new List<string>();

    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var studyBroker = dataContext.GetStudyBroker();
        foreach (var study in studyBroker.GetReindexStudies())
        {
            if (!study.Reindex)
                continue;

            resetStudyUids.Add(study.StudyInstanceUid);
            study.Reindex = false;
        }
        dataContext.Commit();
    }

    if (resetStudyUids.Count > 0)
        EventsHelper.Fire(_studiesRestoredEvent, this, new StudiesEventArgs { StudyInstanceUids = resetStudyUids });
}
/// <summary>
/// Initialize the Reindex. Determine the number of studies in the database and the number of folders on disk to be used
/// for progress.
/// </summary>
public void Initialize()
{
    // Before scanning the study folders, cleanup any empty directories.
    CleanupFilestoreDirectory();

    try
    {
        DirectoryList = new List<string>(Directory.GetDirectories(FilestoreDirectory));
    }
    catch (Exception x)
    {
        Platform.Log(LogLevel.Error, x);
        throw;
    }
    StudyFoldersToScan = DirectoryList.Count;

    // TODO (CR Jun 2012): Seems we're using the "work item" mutex for all updates to the database.
    // Should we just pass in a boolean specifying whether or not to use a mutex?
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var studyBroker = dataContext.GetStudyBroker();

        // Flag every study for reindexing and remember its Oid for the scan phase.
        StudyOidList = new List<long>();
        foreach (var study in studyBroker.GetStudies())
        {
            study.Reindex = true;
            StudyOidList.Add(study.Oid);
        }
        dataContext.Commit();
    }
    DatabaseStudiesToScan = StudyOidList.Count;

    _threadPool.Start();
}
/// <summary>
/// Delete a <see cref="WorkItem"/>.
/// </summary>
public void Delete()
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = dataContext.GetWorkItemBroker();

        // Re-fetch the latest copy, mark it Deleted, then remove the row itself.
        Item = workItemBroker.GetWorkItem(Item.Oid);
        Item.Status = WorkItemStatusEnum.Deleted;
        workItemBroker.Delete(Item);

        dataContext.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Deleting {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid,
                 Item.Request.ActivityDescription);
}
/// <summary>
/// Publishes the current Progress for the WorkItem, optionally persisting it to the
/// database first.
/// </summary>
/// <param name="saveToDatabase">If true, the progress is saved (best effort) before publishing;
/// otherwise it is only copied onto the in-memory Item.</param>
private void Publish(bool saveToDatabase)
{
    if (saveToDatabase)
    {
        try
        {
            using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
            {
                var broker = context.GetWorkItemBroker();
                Item = broker.GetWorkItem(Item.Oid);
                Item.Progress = Progress;
                context.Commit();
            }
        }
        catch (Exception e)
        {
            // Best-effort save: a ChangeConflictException has been observed here occasionally.
            // Previously swallowed silently; log at Debug so recurrences remain visible.
            Platform.Log(LogLevel.Debug, e, "Unexpected exception saving WorkItem progress for OID {0}", Item.Oid);
        }
    }
    else
        Item.Progress = Progress;

    WorkItemPublishSubscribeHelper.PublishWorkItemChanged(WorkItemsChangedEventType.Update,
                                                          WorkItemDataHelper.FromWorkItem(Item));
}
/// <summary>
/// Simple routine for failing a <see cref="WorkItem"/> and rescheduling it at a specified time.
/// </summary>
/// <param name="failureType"></param>
/// <param name="failureTime">The time to reschedule the WorkItem if it isn't a fatal error. </param>
/// <param name="maxRetryCount">The maximum number of times the WorkItem should be retried before a fatal error occurs.</param>
public void Fail(WorkItemFailureType failureType, DateTime failureTime, int maxRetryCount)
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        Item = dataContext.GetWorkItemBroker().GetWorkItem(Item.Oid);

        var timestamp = Platform.Time;
        Item.Progress = Progress;
        Item.FailureCount++;
        Item.DeleteTime = timestamp.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes);

        bool fatal = failureType == WorkItemFailureType.Fatal || Item.FailureCount >= maxRetryCount;
        if (fatal)
        {
            Item.Status = WorkItemStatusEnum.Failed;
            Item.ExpirationTime = timestamp;
        }
        else
        {
            // Reschedule the retry, pushing out the expiration if the retry lands beyond it.
            Item.ProcessTime = failureTime;
            if (Item.ExpirationTime < Item.ProcessTime)
                Item.ExpirationTime = Item.ProcessTime;
            Item.Status = WorkItemStatusEnum.Pending;
        }

        dataContext.Commit();
    }

    Publish(false);
    Platform.Log(LogLevel, "Failing {0} WorkItem for OID {1}: {2}", Item.Type, Item.Oid,
                 Item.Request.ActivityDescription);
}
/// <summary>
/// Called on startup to reset InProgress WorkItems back to Pending.
/// </summary>
/// <remarks>
/// Also moves DeleteInProgress items to Deleted and Canceling items to Canceled. If any
/// reindex WorkItem was interrupted, the Reindex flag is cleared on all flagged studies.
/// Each status transition runs in its own transaction, preserving the original commit order.
/// </remarks>
private void ResetInProgressWorkItems()
{
    bool reindexInProgress = ResetWorkItemStatus(WorkItemStatusEnum.InProgress, WorkItemStatusEnum.Pending, true);

    ResetWorkItemStatus(WorkItemStatusEnum.DeleteInProgress, WorkItemStatusEnum.Deleted, false);

    if (ResetWorkItemStatus(WorkItemStatusEnum.Canceling, WorkItemStatusEnum.Canceled, true))
        reindexInProgress = true;

    if (reindexInProgress)
        ClearReindexFlags();
}

/// <summary>
/// Moves every WorkItem with <paramref name="fromStatus"/> to <paramref name="toStatus"/> in
/// one transaction; returns whether a reindex-type item was among them (when requested).
/// </summary>
private static bool ResetWorkItemStatus(WorkItemStatusEnum fromStatus, WorkItemStatusEnum toStatus, bool checkForReindex)
{
    bool reindexFound = false;
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        foreach (var item in workItemBroker.GetWorkItems(null, fromStatus, null))
        {
            item.Status = toStatus;
            if (checkForReindex && item.Type.Equals(ReindexRequest.WorkItemTypeString))
                reindexFound = true;
        }
        context.Commit();
    }
    return reindexFound;
}

/// <summary>
/// Clears the Reindex flag on all studies currently marked for reindexing.
/// </summary>
private static void ClearReindexFlags()
{
    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var studyBroker = context.GetStudyBroker();
        foreach (var item in studyBroker.GetReindexStudies())
        {
            item.Reindex = false;
        }
        context.Commit();
    }
}
/// <summary>
/// Postpone a <see cref="WorkItem"/>
/// </summary>
/// <param name="delay">The amount of time to postpone the WorkItem by. For non-Stat items with
/// a time-windowed request, the window's scheduling logic is used instead of a plain offset.</param>
public void Postpone(TimeSpan delay)
{
    DateTime now = Platform.Time;
    var workItem = Item.Request as IWorkItemRequestTimeWindow;

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = context.GetWorkItemBroker();
        Item = workItemBroker.GetWorkItem(Item.Oid);

        if (workItem != null && Item.Priority != WorkItemPriorityEnum.Stat)
        {
            // BUG FIX: TimeSpan.Seconds is only the seconds *component* (0-59); a delay of,
            // say, 2 minutes would previously have passed 0. TotalSeconds is the full delay.
            DateTime scheduledTime = workItem.GetScheduledTime(now, (int) delay.TotalSeconds);
            Item.ProcessTime = scheduledTime;
            Item.ScheduledTime = scheduledTime;
        }
        else
        {
            Item.ProcessTime = now.Add(delay);
        }

        Item.Progress = Progress;
        if (Item.ProcessTime > Item.ExpirationTime)
            Item.ExpirationTime = Item.ProcessTime;
        Item.Status = WorkItemStatusEnum.Pending;

        context.Commit();
    }

    // Publish and log outside the transaction, consistent with the other state-transition methods
    // (Cancel, Idle, Fail, etc.), which previously this method did inside the using block.
    Publish(false);
    Platform.Log(LogLevel, "Postponing {0} WorkItem for OID {1} until {2}, expires {3}", Item.Type, Item.Oid,
                 Item.ProcessTime.ToLongTimeString(), Item.ExpirationTime.ToLongTimeString());
}
/// <summary>
/// Service entry point that inserts a new <see cref="WorkItem"/> from a request, with special
/// handling for Reindex (de-duplicated) and DeleteStudy (study flagged deleted) requests.
/// </summary>
/// <param name="request">The insert request carrying the typed WorkItem request and initial progress.</param>
/// <returns>A response whose <c>Item</c> is the newly inserted WorkItem, or the existing
/// Pending/InProgress WorkItem when a duplicate Reindex/DeleteStudy was requested.</returns>
public WorkItemInsertResponse Insert(WorkItemInsertRequest request)
{
    // TODO (CR Jun 2012): The fact that there is special processing in here for particular types of work items
    // indicates there is something wrong with the design that may make adding custom work item types difficult.
    // Maybe the different "processors" need to perform the insert, or at least have some kind of method (rule)
    // for processing the insert?
    var response = new WorkItemInsertResponse();

    using (var context = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        DateTime now = Platform.Time;
        var broker = context.GetWorkItemBroker();

        // Reindex requests are de-duplicated: return the existing active one if present.
        if (request.Request.WorkItemType.Equals(ReindexRequest.WorkItemTypeString))
        {
            var list = broker.GetWorkItems(request.Request.WorkItemType, null, null);
            foreach (var workItem in list)
            {
                if (workItem.Status == WorkItemStatusEnum.Pending
                    || workItem.Status == WorkItemStatusEnum.InProgress)
                {
                    response.Item = WorkItemDataHelper.FromWorkItem(workItem);
                    return response;
                }
            }
        }

        // DeleteStudy requests are also de-duplicated per study; when a duplicate is found the
        // study row is (re-)flagged as deleted before returning the existing WorkItem.
        var deleteStudyRequest = request.Request as DeleteStudyRequest;
        if (deleteStudyRequest != null)
        {
            var list = broker.GetWorkItems(request.Request.WorkItemType, null, deleteStudyRequest.Study.StudyInstanceUid);
            foreach (var workItem in list)
            {
                if (workItem.Status == WorkItemStatusEnum.Pending
                    || workItem.Status == WorkItemStatusEnum.InProgress)
                {
                    // Mark studies to delete as "deleted" in the database.
                    var studyBroker = context.GetStudyBroker();
                    var study = studyBroker.GetStudy(deleteStudyRequest.Study.StudyInstanceUid);
                    if (study != null)
                    {
                        study.Deleted = true;
                        context.Commit();
                    }

                    response.Item = WorkItemDataHelper.FromWorkItem(workItem);
                    return response;
                }
            }
        }

        // Normal path: build the new WorkItem with its schedule derived from the configured delays.
        var item = new WorkItem
        {
            Request = request.Request,
            Progress = request.Progress,
            Type = request.Request.WorkItemType,
            Priority = request.Request.Priority,
            ScheduledTime = now.AddSeconds(WorkItemServiceSettings.Default.InsertDelaySeconds),
            ProcessTime = now.AddSeconds(WorkItemServiceSettings.Default.InsertDelaySeconds),
            DeleteTime = now.AddMinutes(WorkItemServiceSettings.Default.DeleteDelayMinutes),
            ExpirationTime = now.AddSeconds(WorkItemServiceSettings.Default.ExpireDelaySeconds),
            RequestedTime = now,
            Status = WorkItemStatusEnum.Pending
        };

        var studyRequest = request.Request as WorkItemStudyRequest;
        if (studyRequest != null)
        {
            item.StudyInstanceUid = studyRequest.Study.StudyInstanceUid;

            if (request.Request.WorkItemType.Equals(DeleteStudyRequest.WorkItemTypeString))
            {
                // Mark studies to delete as "deleted" in the database.
                var studyBroker = context.GetStudyBroker();
                var study = studyBroker.GetStudy(studyRequest.Study.StudyInstanceUid);
                if (study != null)
                    study.Deleted = true;
            }
        }

        broker.AddWorkItem(item);

        context.Commit();

        response.Item = WorkItemDataHelper.FromWorkItem(item);
    }

    // Cache the UserIdentityContext for later use by the shred
    if (request.Request.WorkItemType.Equals(ImportFilesRequest.WorkItemTypeString))
        UserIdentityCache.Put(response.Item.Identifier, UserIdentityContext.CreateFromCurrentThreadPrincipal());

    WorkItemPublishSubscribeHelper.PublishWorkItemChanged(WorkItemsChangedEventType.Update, response.Item);
    if (WorkItemProcessor.Instance != null)
        WorkItemProcessor.Instance.SignalThread();

    return response;
}
/// <summary>
/// Removes the WorkItem rows associated with the given scheduling tests in one transaction.
/// </summary>
private void DeleteWorkItems(IEnumerable<SchedulingTest> list)
{
    using (var dataContext = new DataAccessContext(DataAccessContext.WorkItemMutex))
    {
        var workItemBroker = dataContext.GetWorkItemBroker();
        foreach (var schedulingTest in list)
        {
            var workItem = workItemBroker.GetWorkItem(schedulingTest.Processor.Proxy.Item.Oid);
            workItemBroker.Delete(workItem);
        }
        dataContext.Commit();
    }
}