/// <summary>
/// Inserts a move request to move one or more series in a study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="partition">The <see cref="ServerPartition"/> where the study resides.</param>
/// <param name="studyInstanceUid">The Study Instance Uid of the study.</param>
/// <param name="deviceKey">The key of the device to move the series to.</param>
/// <param name="seriesInstanceUids">The Series Instance Uids of the series to be moved.</param>
/// <returns>The MoveSeries <see cref="WorkQueue"/> entries inserted into the system.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
public static IList<WorkQueue> MoveSeries(IUpdateContext context, ServerPartition partition, string studyInstanceUid, ServerEntityKey deviceKey, List<string> seriesInstanceUids)
{
    // A study may exist at more than one storage location; insert one request per location.
    IList<StudyStorageLocation> locations = StudyStorageLocation.FindStorageLocations(partition.Key, studyInstanceUid);
    IList<WorkQueue> requests = new List<WorkQueue>();
    foreach (StudyStorageLocation location in locations)
    {
        try
        {
            WorkQueue moveRequest = InsertMoveSeriesRequest(context, location, seriesInstanceUids, deviceKey);
            Debug.Assert(moveRequest.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.WebMoveStudy));
            requests.Add(moveRequest);
        }
        catch (Exception ex)
        {
            Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert move request");
            // NOTE(review): unlike DeleteSeries, no LockStudy call precedes this unlock;
            // confirm the study is locked elsewhere before relying on this cleanup.
            if (!ServerHelper.UnlockStudy(location.Key))
            {
                throw new ApplicationException("Unable to unlock the study");
            }
        }
    }
    return requests;
}
/// <summary>
/// Inserts delete request(s) to delete a series in a study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="partition">The <see cref="ServerPartition"/> where the study resides.</param>
/// <param name="studyInstanceUid">The Study Instance Uid of the study.</param>
/// <param name="seriesInstanceUids">The Series Instance Uids of the series to be deleted.</param>
/// <param name="reason">The reason for deleting the series.</param>
/// <returns>A list of DeleteSeries <see cref="WorkQueue"/> entries inserted into the system.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
public static IList<WorkQueue> DeleteSeries(IUpdateContext context, ServerPartition partition, string studyInstanceUid, List<string> seriesInstanceUids, string reason)
{
    // A study may exist at more than one storage location; insert one request per location.
    IList<StudyStorageLocation> locations = StudyStorageLocation.FindStorageLocations(partition.Key, studyInstanceUid);
    IList<WorkQueue> requests = new List<WorkQueue>();
    foreach (StudyStorageLocation location in locations)
    {
        try
        {
            // The study must be locked for deletion before the request is queued.
            string failureReason;
            if (!ServerHelper.LockStudy(location.Key, QueueStudyStateEnum.WebDeleteScheduled, out failureReason))
            {
                throw new ApplicationException(String.Format("Unable to lock storage location {0} for deletion : {1}", location.Key, failureReason));
            }

            WorkQueue deleteRequest = InsertDeleteSeriesRequest(context, location, seriesInstanceUids, reason);
            Debug.Assert(deleteRequest.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.WebDeleteStudy));
            requests.Add(deleteRequest);
        }
        catch (Exception ex)
        {
            Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert delete request");
            if (!ServerHelper.UnlockStudy(location.Key))
            {
                throw new ApplicationException("Unable to unlock the study");
            }
        }
    }
    return requests;
}
/// <summary>
/// Gets a temporary working directory on the same filesystem as the study referenced by the
/// archive queue item, falling back to the base implementation when the study has no
/// storage location.
/// </summary>
/// <returns>The full path of an existing (created if necessary) temporary directory.</returns>
protected override string GetTemporaryPath()
{
    // Fixed: the original indexed [0] before its null test, so an empty result list threw
    // IndexOutOfRangeException and the fallback was unreachable. Guard on the list instead
    // (same pattern as the other GetTemporaryPath override in this codebase).
    IList<StudyStorageLocation> storages = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(_item.StudyStorageKey));
    if (storages == null || storages.Count == 0)
    {
        return base.GetTemporaryPath();
    }

    StudyStorageLocation storage = storages[0];

    // Root the temp directory on the configured temp path, or on the study's filesystem.
    String basePath = GetTempPathRoot();
    if (String.IsNullOrEmpty(basePath))
    {
        basePath = Path.Combine(storage.FilesystemPath, "temp");
    }

    // Probe ArchiveQueue-<key>, ArchiveQueue-<key>(2), ... until an unused name is found.
    String tempDirectory = Path.Combine(basePath, String.Format("ArchiveQueue-{0}", _item.GetKey()));
    for (int i = 2; i < 1000; i++)
    {
        if (!Directory.Exists(tempDirectory))
        {
            break;
        }
        tempDirectory = Path.Combine(basePath, String.Format("ArchiveQueue-{0}({1})", _item.GetKey(), i));
    }

    if (!Directory.Exists(tempDirectory))
    {
        Directory.CreateDirectory(tempDirectory);
    }

    return tempDirectory;
}
/// <summary>
/// Binds the reconcile details and the study's storage location to the page controls.
/// </summary>
public override void DataBind()
{
    ExistingPatientSeriesGridView.DataSource = ReconcileDetails.ExistingStudy.Series;
    ConflictingPatientSeriesGridView.DataSource = ReconcileDetails.ConflictingStudyInfo.Series;

    StudyStorage storage = StudyStorage.Load(HttpContextData.Current.ReadContext, StudyIntegrityQueueItem.StudyStorageKey);
    IList<StudyStorageLocation> locations = StudyStorageLocation.FindStorageLocations(storage);
    StudyStorageLocation primaryLocation = locations[0];
    StudyLocation.Text = primaryLocation.GetStudyPath();

    // NOTE(review): ReconcileDetails is already dereferenced above, so this null test
    // can only ever take the first branch.
    ConflictingStudyLocation.Text = ReconcileDetails != null ? ReconcileDetails.GetFolderPath() : SR.NotSpecified;

    string reason;
    CanReconcile = _controller.CanReconcile(primaryLocation, out reason);
    MessagePanel.Visible = !CanReconcile;
    AlertMessage.Text = reason;
    OKButton.Enabled = CanReconcile;
    OptionRow.Visible = CanReconcile;
    base.DataBind();
}
/// <summary>
/// Creates an item wrapping a <see cref="Study"/> together with its storage record,
/// its first known storage location (null when none exists), and the status description.
/// </summary>
public StudyItem(Study study)
{
    _study = study;
    StudyStorage = StudyStorage.Load(study.StudyStorageKey);
    // FirstOrDefault yields null when the study currently has no storage location.
    StudyStorageLocation = StudyStorageLocation.FindStorageLocations(StudyStorage).FirstOrDefault();
    _status = StudyStorage.StudyStatusEnum.Description;
}
/// <summary>
/// Returns the reconcile folder path for the item, lazily resolving the study's
/// storage location on first call.
/// </summary>
public string GetFolderPath()
{
    if (_location == null)
    {
        if (_studyStorage == null)
        {
            using (IReadContext readContext = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
            {
                _studyStorage = StudyStorage.Load(readContext, _item.StudyStorageKey);
            }
        }

        _location = StudyStorageLocation.FindStorageLocations(_studyStorage)[0];
    }

    // <filesystem>/<partition>/<reconcile folder>[/<group id>]/<study uid>
    string folderPath = Path.Combine(_location.FilesystemPath, _location.PartitionFolder);
    folderPath = Path.Combine(folderPath, ServerPlatform.ReconcileStorageFolder);
    if (!string.IsNullOrEmpty(_item.GroupID))
    {
        folderPath = Path.Combine(folderPath, _item.GroupID);
    }
    return Path.Combine(folderPath, _location.StudyInstanceUid);
}
/// <summary>
/// Lazily resolves the first storage location of the queue item's study.
/// </summary>
private void LoadStorageLocation()
{
    if (_storageLocation != null)
        return;

    var studyStorage = StudyStorage.Load(HttpContextData.Current.ReadContext, TheStudyIntegrityQueueItem.StudyStorageKey);
    _storageLocation = StudyStorageLocation.FindStorageLocations(studyStorage)[0];
}
/// <summary>
/// Lazily resolves the first storage location of the queue item's study, using the
/// shared persistent context of the current HTTP request.
/// </summary>
private void LoadStorageLocation()
{
    if (_storageLocation != null)
        return;

    var studyStorage = StudyStorage.Load(HttpContext.Current.GetSharedPersistentContext(), TheStudyIntegrityQueueItem.StudyStorageKey);
    _storageLocation = StudyStorageLocation.FindStorageLocations(studyStorage)[0];
}
/// <summary>
/// Schedules a reprocess of the study identified by the given storage key.
/// </summary>
/// <param name="reason">The reason the study is being reprocessed.</param>
/// <param name="key">The <see cref="ServerEntityKey"/> of the study's storage record.</param>
/// <exception cref="InvalidStudyStateOperationException">Study is in a state that reprocessing is not allowed</exception>
public void ReprocessStudy(String reason, ServerEntityKey key)
{
    var adaptor = new StudyStorageAdaptor();
    StudyStorage storage = adaptor.Get(key);
    StudyStorageLocation location = StudyStorageLocation.FindStorageLocations(storage)[0];
    new StudyReprocessor().ReprocessStudy(reason, location, Platform.Time);
}
/// <summary>
/// Builds a list of <see cref="BaseImageLevelUpdateCommand"/> for the specified study using the specified mapping template.
/// </summary>
/// <typeparam name="TMappingObject"></typeparam>
/// <param name="storage"></param>
/// <param name="originalDicomAttributeProvider"></param>
/// <returns></returns>
/// <remarks>
/// This method generates a list of <see cref="BaseImageLevelUpdateCommand"/> based on the mapping in <see cref="TMappingObject"/>.
/// <see cref="TMappingObject"/> specifies which Dicom fields the application is interested in, using <see cref="DicomFieldAttribute"/>.
/// For example, if the application needs to update the study instance uid and study date in an image with what's in the database,
/// it will define the mapping class as:
/// <code>
/// class StudyInfoMapping
/// {
///     [DicomField(DicomTags.StudyInstanceUid)]
///     public String StudyInstanceUid { get { ... } set { ... } }
///
///     [DicomField(DicomTags.StudyDate)]
///     public String StudyDate { get { ... } set { ... } }
/// }
///
/// ImageUpdateCommandBuilder builder = new ImageUpdateCommandBuilder();
/// IList&lt;BaseImageLevelUpdateCommand&gt; commandList = builder.BuildCommands&lt;StudyInfoMapping&gt;(studystorage);
///
/// DicomFile file = new DicomFile("file.dcm");
/// foreach(BaseImageUpdateCommand command in commandList)
/// {
///     command.Apply(file);
/// }
/// </code>
/// </remarks>
public IList<BaseImageLevelUpdateCommand> BuildCommands<TMappingObject>(StudyStorage storage, IDicomAttributeProvider originalDicomAttributeProvider)
{
    // Delegate to the location-based overload using the study's first storage location.
    IList<StudyStorageLocation> locations = StudyStorageLocation.FindStorageLocations(storage);
    Debug.Assert(locations != null && locations.Count > 0);
    return BuildCommands<TMappingObject>(locations[0], originalDicomAttributeProvider);
}
/// <summary>
/// Verifies that at least one storage location of the study has an archive location;
/// throws otherwise so an unarchived study is never purged.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    var locations = StudyStorageLocation.FindStorageLocations(_storage);
    bool archived = locations.Any(location => location.ArchiveLocations.Any());
    if (!archived)
    {
        throw new ApplicationException("Cannot purge study which has not been archived");
    }
}
/// <summary>
/// Resolves the destination study storage: the location recorded in the history when one
/// exists, otherwise the work queue item's own storage (which is then recorded).
/// </summary>
private void DetermineTargetLocation()
{
    if (Context.History.DestStudyStorageKey == null)
    {
        _destinationStudyStorage = Context.WorkQueueItemStudyStorage;
        Context.History.DestStudyStorageKey = _destinationStudyStorage.Key;
        return;
    }

    StudyStorage destStorage = StudyStorage.Load(Context.History.DestStudyStorageKey);
    _destinationStudyStorage = StudyStorageLocation.FindStorageLocations(destStorage)[0];
}
/// <summary>
/// Builds the purge pipeline: verify the study is archived, delete every storage
/// location's study directory, then mark the study nearline.
/// </summary>
public PurgeStudyCommand(StudyStorage studyStorage)
    : base()
{
    AddSubCommand(new VerifyStudyHasBeenArchivedCommand(studyStorage));

    var storageLocations = StudyStorageLocation.FindStorageLocations(studyStorage);
    foreach (var location in storageLocations)
    {
        AddSubCommand(new Dicom.Utilities.Command.DeleteDirectoryCommand(location.GetStudyPath(), false));
    }

    AddSubCommand(new SetStudyStatusNearlineCommand(studyStorage));
}
/// <summary>
/// Materializes a <see cref="ReconcileHistoryRecord"/> from a StudyReconciled history row.
/// </summary>
public static ReconcileHistoryRecord ReadReconcileRecord(StudyHistory historyRecord)
{
    Platform.CheckTrue(historyRecord.StudyHistoryTypeEnum == StudyHistoryTypeEnum.StudyReconciled, "History record has invalid history record type");

    var parser = new StudyReconcileDescriptorParser();
    return new ReconcileHistoryRecord
    {
        InsertTime = historyRecord.InsertTime,
        StudyStorageLocation = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(historyRecord.StudyStorageKey))[0],
        UpdateDescription = parser.Parse(historyRecord.ChangeDescription)
    };
}
/// <summary>
/// Gets a temporary working directory on the same filesystem as the work queue item's
/// study, falling back to the base implementation when no location or filesystem is available.
/// </summary>
/// <returns>The full path of an existing (created if necessary) temporary directory.</returns>
protected override string GetTemporaryPath()
{
    IList<StudyStorageLocation> storages = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(_item.StudyStorageKey));
    if (storages == null || storages.Count == 0)
    {
        // No storage location is known for the study yet.
        return base.GetTemporaryPath();
    }

    ServerFilesystemInfo filesystem = FilesystemMonitor.Instance.GetFilesystemInfo(storages[0].FilesystemKey);
    if (filesystem == null)
    {
        // Filesystem not available from the monitor.
        return base.GetTemporaryPath();
    }

    string basePath = GetTempPathRoot();
    if (String.IsNullOrEmpty(basePath))
    {
        basePath = Path.Combine(filesystem.Filesystem.FilesystemPath, "temp");
    }

    // Probe <type>-<key>, <type>-<key>(2), ... until an unused name is found.
    string tempDirectory = Path.Combine(basePath, String.Format("{0}-{1}", _item.WorkQueueTypeEnum.Lookup, _item.GetKey()));
    for (int suffix = 2; suffix < 1000 && Directory.Exists(tempDirectory); suffix++)
    {
        tempDirectory = Path.Combine(basePath, String.Format("{0}-{1}({2})", _item.WorkQueueTypeEnum.Lookup, _item.GetKey(), suffix));
    }

    if (!Directory.Exists(tempDirectory))
    {
        Directory.CreateDirectory(tempDirectory);
    }

    return tempDirectory;
}
/// <summary>
/// Binds the duplicate-SOP entry details, the study's storage path, and the duplicate
/// SOP folder path to the page controls.
/// </summary>
public override void DataBind()
{
    ExistingPatientSeriesGridView.DataSource = DuplicateEntryDetails.ExistingStudy.Series;
    ConflictingPatientSeriesGridView.DataSource = DuplicateEntryDetails.ConflictingImageSet.StudyInfo.Series;

    StudyStorage storage = StudyStorage.Load(HttpContext.Current.GetSharedPersistentContext(), StudyIntegrityQueueItem.StudyStorageKey);
    IList<StudyStorageLocation> locations = StudyStorageLocation.FindStorageLocations(storage);
    StudyLocation.Text = locations[0].GetStudyPath();

    var duplicateEntry = new DuplicateSopReceivedQueue(StudyIntegrityQueueItem);
    DuplicateSopLocation.Text = duplicateEntry.GetFolderPath(HttpContext.Current.GetSharedPersistentContext());

    ComparisonResultGridView.DataSource = DuplicateEntryDetails.QueueData.ComparisonResults;
    base.DataBind();
}
/// <summary>
/// Resolves or creates the destination study storage for the reconcile: reuses the
/// destination recorded in the history if present, otherwise creates new storage using
/// the Study Instance Uid supplied by the SetTag update command.
/// </summary>
private void CreateDestinationStudyStorage()
{
    // This really should never happen; the history normally has no destination yet.
    if (Context.History.DestStudyStorageKey != null)
    {
        _destinationStudyStorage = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(Context.History.DestStudyStorageKey))[0];
        return;
    }

    // The new Study Instance Uid comes from the SetTag update command, if present.
    string newStudyInstanceUid = string.Empty;
    foreach (BaseImageLevelUpdateCommand command in Commands)
    {
        var setTag = command as SetTagCommand;
        if (setTag != null && setTag.Tag.TagValue.Equals(DicomTags.StudyInstanceUid))
        {
            newStudyInstanceUid = setTag.Value;
            break;
        }
    }

    if (string.IsNullOrEmpty(newStudyInstanceUid))
    {
        throw new ApplicationException("Unexpectedly could not find new Study Instance Uid value for Create Study");
    }

    using (var processor = new ServerCommandProcessor("Reconciling image processor"))
    {
        // Assign new series and instance uid
        var initStorage = new InitializeStorageCommand(Context, newStudyInstanceUid,
                                                       Context.WorkQueueItemStudyStorage.StudyFolder,
                                                       TransferSyntax.GetTransferSyntax(Context.WorkQueueItemStudyStorage.TransferSyntaxUid));
        processor.AddCommand(initStorage);

        if (!processor.Execute())
        {
            throw new ApplicationException(String.Format("Unable to create Study Storage for study: {0}", newStudyInstanceUid), processor.FailureException);
        }

        _destinationStudyStorage = initStorage.Location;
    }
}
/// <summary>
/// Materializes a <see cref="WebEditStudyHistoryRecord"/> from a WebEdited or
/// ExternalEdit history row.
/// </summary>
public static WebEditStudyHistoryRecord ReadEditRecord(StudyHistory historyRecord)
{
    Platform.CheckTrue(historyRecord.StudyHistoryTypeEnum == StudyHistoryTypeEnum.WebEdited || historyRecord.StudyHistoryTypeEnum == StudyHistoryTypeEnum.ExternalEdit, "History record has invalid history record type");

    var record = new WebEditStudyHistoryRecord();
    record.InsertTime = historyRecord.InsertTime;
    record.StudyStorageLocation = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(historyRecord.StudyStorageKey))[0];
    record.UpdateDescription = XmlUtils.Deserialize<WebEditStudyHistoryChangeDescription>(historyRecord.ChangeDescription);
    return record;
}
/// <summary>
/// Inserts edit request(s) to update a study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="studyStorageKey">The StudyStorage record key.</param>
/// <param name="updateItems">The list of tag updates to apply.</param>
/// <param name="reason">The reason the study is being editted.</param>
/// <param name="userId">The ID of the user requesting the study edit.</param>
/// <param name="editType">The request is a web edit request.</param>
/// <returns>The WebEditStudy <see cref="WorkQueue"/> entries inserted into the system.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
public static IList<WorkQueue> EditStudy(IUpdateContext context, ServerEntityKey studyStorageKey, List<UpdateItem> updateItems, string reason, string userId, EditType editType)
{
    // Find all locations of the study in the system and insert an edit request for each.
    IList<StudyStorageLocation> storageLocations = StudyStorageLocation.FindStorageLocations(studyStorageKey);
    IList<WorkQueue> entries = new List<WorkQueue>();
    foreach (StudyStorageLocation location in storageLocations)
    {
        // A lossy study whose latest archive is lossless must be restored before it may be edited.
        if (location.StudyStatusEnum.Equals(StudyStatusEnum.OnlineLossy))
        {
            if (location.IsLatestArchiveLossless)
            {
                throw new InvalidStudyStateOperationException("Study is lossy but was archived as lossless. It must be restored before editing.");
            }
        }

        try
        {
            string failureReason;
            if (ServerHelper.LockStudy(location.Key, QueueStudyStateEnum.EditScheduled, out failureReason))
            {
                // insert an edit request
                WorkQueue request = InsertEditStudyRequest(context, location.Key, location.ServerPartitionKey, WorkQueueTypeEnum.WebEditStudy, updateItems, reason, userId, editType);
                entries.Add(request);
            }
            else
            {
                throw new ApplicationException(String.Format("Unable to lock storage location {0} for edit : {1}", location.Key, failureReason));
            }
        }
        catch (Exception ex)
        {
            // Fixed log-message typo ("occured" -> "occurred") for consistency with the
            // MoveSeries/DeleteSeries variants of this method.
            Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert edit request");
            if (!ServerHelper.UnlockStudy(location.Key))
            {
                throw new ApplicationException("Unable to unlock the study");
            }
        }
    }
    return entries;
}
/// <summary>
/// Executes the merge: resolves the destination study storage, ensures it can be
/// updated, optionally updates the existing study, processes the queued uids, and
/// always records the history afterwards.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
protected override void OnExecute(CommandProcessor theProcessor)
{
    Platform.CheckForNullReference(Context, "Context");

    // Destination comes from the history record when one was already assigned;
    // otherwise the work queue item's own study storage is the destination.
    _destinationStudyStorage = Context.History.DestStudyStorageKey != null
        ? StudyStorageLocation.FindStorageLocations(StudyStorage.Load(Context.History.DestStudyStorageKey))[0]
        : Context.WorkQueueItemStudyStorage;

    EnsureStudyCanBeUpdated(_destinationStudyStorage);

    if (_updateDestination)
    {
        UpdateExistingStudy();
    }

    LoadMergedStudyEntities();

    try
    {
        LoadUidMappings();

        if (Context.WorkQueueUidList.Count > 0)
        {
            ProcessUidList();
            LogResult();
        }
    }
    finally
    {
        // History is updated even when uid processing throws.
        UpdateHistory(_destinationStudyStorage);
    }

    // Only apply the study-processed rules once the merge has fully completed.
    if (_complete)
    {
        StudyRulesEngine engine = new StudyRulesEngine(_destinationStudyStorage, Context.Partition);
        engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed, theProcessor);
    }
}
/// <summary>
/// Builds the alert context data for a work queue item, including study validation info
/// when a storage location (and optionally its study record) is available.
/// </summary>
private static WorkQueueAlertContextData GetWorkQueueContextData(Model.WorkQueue item)
{
    Platform.CheckForNullReference(item, "item");

    var contextData = new WorkQueueAlertContextData
    {
        WorkQueueItemKey = item.Key.Key.ToString()
    };

    StudyStorage storage = StudyStorage.Load(item.StudyStorageKey);
    IList<StudyStorageLocation> locations = StudyStorageLocation.FindStorageLocations(storage);
    if (locations == null || locations.Count == 0)
        return contextData;

    StudyStorageLocation location = locations[0];
    if (location == null)
        return contextData;

    contextData.ValidationStudyInfo = new ValidationStudyInfo
    {
        StudyInstaneUid = location.StudyInstanceUid
    };

    // study info is not always available (eg, when all images failed to process)
    if (location.Study != null)
    {
        contextData.ValidationStudyInfo.AccessionNumber = location.Study.AccessionNumber;
        contextData.ValidationStudyInfo.PatientsId = location.Study.PatientId;
        contextData.ValidationStudyInfo.PatientsName = location.Study.PatientsName;
        contextData.ValidationStudyInfo.ServerAE = location.ServerPartition.AeTitle;
        contextData.ValidationStudyInfo.StudyDate = location.Study.StudyDate;
    }

    return contextData;
}
/// <summary>
/// Reloads the destination storage location so its referenced entities reflect the
/// merged state.
/// </summary>
private void LoadMergedStudyEntities()
{
    _destinationStudyStorage = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(_destinationStudyStorage.Key))[0];
}
/// <summary>
/// Processes a ReprocessStudy work queue entry: re-reads the DICOM files in the study
/// folder and rebuilds the database and study-xml state from them, resuming across
/// runs via the state saved in the queue item's Data column.
/// </summary>
/// <param name="item">The ReprocessStudy work queue item being processed.</param>
protected override void ProcessItem(Model.WorkQueue item)
{
    Platform.CheckForNullReference(item, "item");
    Platform.CheckForNullReference(item.StudyStorageKey, "item.StudyStorageKey");

    bool successful = true;
    string failureDescription = null;

    // The processor stores its state in the Data column
    ReadQueueData(item);

    if (_queueData.State == null || !_queueData.State.ExecuteAtLeastOnce)
    {
        // First run of this entry.
        // Added for ticket #9673:
        // If the study folder does not exist and the study has been archived, trigger a restore and we're done
        if (!Directory.Exists(StorageLocation.GetStudyPath()))
        {
            if (StorageLocation.ArchiveLocations.Count > 0)
            {
                Platform.Log(LogLevel.Info,
                             "Reprocessing archived study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} without study data on the filesystem. Inserting Restore Request.",
                             Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                             Study.AccessionNumber, ServerPartition.Description);

                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                // Post process had to be done first so the study is unlocked so the RestoreRequest can be inserted.
                ServerHelper.InsertRestoreRequest(StorageLocation);

                RaiseAlert(WorkQueueItem, AlertLevel.Warning,
                           string.Format(
                               "Found study {0} for Patient {1} (A#:{2})on Partition {3} without storage folder, restoring study.",
                               Study.StudyInstanceUid, Study.PatientsName, Study.AccessionNumber, ServerPartition.Description));
                return;
            }
        }

        if (Study == null)
        {
            Platform.Log(LogLevel.Info, "Reprocessing study {0} on Partition {1}",
                         StorageLocation.StudyInstanceUid, ServerPartition.Description);
        }
        else
        {
            Platform.Log(LogLevel.Info, "Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                         Study.AccessionNumber, ServerPartition.Description);
        }

        CleanupDatabase();
    }
    else
    {
        if (_queueData.State.Completed)
        {
            #region SAFE-GUARD CODE: PREVENT INFINITE LOOP

            // The processor indicated it had completed reprocessing in previous run. The entry should have been removed and this block of code should never be called.
            // However, we have seen ReprocessStudy entries that mysterously contain rows in the WorkQueueUid table.
            // The rows prevent the entry from being removed from the database and the ReprocessStudy keeps repeating itself.

            // update the state first, increment the CompleteAttemptCount
            _queueData.State.ExecuteAtLeastOnce = true;
            _queueData.State.Completed = true;
            _queueData.State.CompleteAttemptCount++;
            SaveState(item, _queueData);

            if (_queueData.State.CompleteAttemptCount < 10)
            {
                // maybe there was db error in previous attempt to remove the entry. Let's try again.
                Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} but it was already completed!!!",
                             StorageLocation.StudyInstanceUid);

                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
            }
            else
            {
                // we are definitely stuck.
                Platform.Log(LogLevel.Error, "ReprocessStudy {0} for study {1} appears stuck. Aborting it.",
                             item.Key, StorageLocation.StudyInstanceUid);
                item.FailureDescription = "This entry had completed but could not be removed.";
                PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
            }

            return;

            #endregion
        }

        if (Study == null)
        {
            Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} on Partition {1}",
                         StorageLocation.StudyInstanceUid, ServerPartition.Description);
        }
        else
        {
            Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                         Study.AccessionNumber, ServerPartition.Description);
        }
    }

    // As per #12583, Creation of the SopInstanceProcessor should occur after the CleanupDatabase() call.
    var context = new StudyProcessorContext(StorageLocation, WorkQueueItem);

    // TODO: Should we enforce the patient's name rule?
    // If we do, the Study record will have the new patient's name
    // but how should we handle the name in the Patient record?
    const bool enforceNameRules = false;
    var processor = new SopInstanceProcessor(context) { EnforceNameRules = enforceNameRules };

    // Tracks series uid -> instance uids seen, for the final consistency check.
    var seriesMap = new Dictionary<string, List<string>>();

    StudyXml studyXml = LoadStudyXml();

    var reprocessedCounter = 0;
    var skippedCount = 0;
    var removedFiles = new List<FileInfo>();
    try
    {
        // Traverse the directories, process 500 files at a time
        var isCancelled = FileProcessor.Process(StorageLocation.GetStudyPath(), "*.*",
            delegate(string path, out bool cancel)
            {
                #region Reprocess File

                var file = new FileInfo(path);

                // ignore all files except those ending ".dcm"
                // ignore "bad(0).dcm" files too
                if (Regex.IsMatch(file.Name.ToUpper(), "[0-9]+\\.DCM$"))
                {
                    try
                    {
                        // StorePixelDataReferences keeps pixel data referenced on disk rather than loaded.
                        var dicomFile = new DicomFile(path);
                        dicomFile.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);

                        string seriesUid = dicomFile.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty);
                        string instanceUid = dicomFile.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty);
                        if (studyXml.Contains(seriesUid, instanceUid))
                        {
                            // Already present in the study xml: record it and skip the reprocess.
                            if (!seriesMap.ContainsKey(seriesUid))
                            {
                                seriesMap.Add(seriesUid, new List<string>());
                            }

                            if (!seriesMap[seriesUid].Contains(instanceUid))
                            {
                                seriesMap[seriesUid].Add(instanceUid);
                            }
                            else
                            {
                                Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                            }

                            skippedCount++;
                        }
                        else
                        {
                            Platform.Log(ServerPlatform.InstanceLogLevel, "Reprocessing SOP {0} for study {1}", instanceUid, StorageLocation.StudyInstanceUid);
                            string groupId = ServerHelper.GetUidGroup(dicomFile, StorageLocation.ServerPartition, WorkQueueItem.InsertTime);
                            ProcessingResult result = processor.ProcessFile(groupId, dicomFile, studyXml, true, false, null, null, SopInstanceProcessorSopType.ReprocessedSop);
                            switch (result.Status)
                            {
                                case ProcessingStatus.Success:
                                    reprocessedCounter++;
                                    if (!seriesMap.ContainsKey(seriesUid))
                                    {
                                        seriesMap.Add(seriesUid, new List<string>());
                                    }

                                    if (!seriesMap[seriesUid].Contains(instanceUid))
                                    {
                                        seriesMap[seriesUid].Add(instanceUid);
                                    }
                                    else
                                    {
                                        Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                                    }
                                    break;

                                case ProcessingStatus.Reconciled:
                                    Platform.Log(LogLevel.Warn, "SOP was unexpectedly reconciled on reprocess SOP {0} for study {1}. It will be removed from the folder.", instanceUid, StorageLocation.StudyInstanceUid);
                                    failureDescription = String.Format("SOP Was reconciled: {0}", instanceUid);

                                    // Added for #10620 (Previously we didn't do anything here)
                                    // Because we are reprocessing files in the study folder, when file needs to be reconciled it is copied to the reconcile folder
                                    // Therefore, we need to delete the one in the study folder. Otherwise, there will be problem when the SIQ entry is reconciled.
                                    // InstanceAlreadyExistsException will also be thrown by the SOpInstanceProcessor if this ReprocessStudy WQI
                                    // resumes and reprocesses the same file again.
                                    // Note: we are sure that the file has been copied to the Reconcile folder and there's no way back.
                                    // We must get rid of this file in the study folder.
                                    FileUtils.Delete(path);

                                    // Special handling: if the file is one which we're supposed to reprocess at the end (see ProcessAdditionalFiles), we must remove the file from the list
                                    if (_additionalFilesToProcess != null && _additionalFilesToProcess.Contains(path))
                                    {
                                        _additionalFilesToProcess.Remove(path);
                                    }

                                    break;
                            }
                        }
                    }
                    catch (DicomException ex)
                    {
                        // TODO : should we fail the reprocess instead? Deleting an dicom file can lead to incomplete study.
                        removedFiles.Add(file);
                        Platform.Log(LogLevel.Warn, "Skip reprocessing and delete {0}: Not readable.", path);
                        FileUtils.Delete(path);
                        failureDescription = ex.Message;
                    }
                }
                else if (!file.Extension.Equals(".xml") && !file.Extension.Equals(".gz"))
                {
                    // not a ".dcm" or header file, delete it
                    removedFiles.Add(file);
                    FileUtils.Delete(path);
                }

                #endregion

                if (reprocessedCounter > 0 && reprocessedCounter % 200 == 0)
                {
                    Platform.Log(LogLevel.Info, "Reprocessed {0} files for study {1}", reprocessedCounter + skippedCount, StorageLocation.StudyInstanceUid);
                }

                // Cap the batch at 5000 reprocessed files; the entry resumes later from saved state.
                cancel = reprocessedCounter >= 5000;
            }, true);

        if (studyXml != null)
        {
            EnsureConsistentObjectCount(studyXml, seriesMap);
            SaveStudyXml(studyXml);
        }

        // Completed if either all files have been reprocessed
        // or no more dicom files left that can be reprocessed.
        _completed = reprocessedCounter == 0 || !isCancelled;
    }
    catch (Exception e)
    {
        successful = false;
        failureDescription = e.Message;
        Platform.Log(LogLevel.Error, e, "Unexpected exception when reprocessing study: {0}", StorageLocation.StudyInstanceUid);
        Platform.Log(LogLevel.Error, "Study may be in invalid unprocessed state. Study location: {0}", StorageLocation.GetStudyPath());
        throw;
    }
    finally
    {
        LogRemovedFiles(removedFiles);

        // Update the state
        _queueData.State.ExecuteAtLeastOnce = true;
        _queueData.State.Completed = _completed;
        _queueData.State.CompleteAttemptCount++;
        SaveState(item, _queueData);

        if (!successful)
        {
            FailQueueItem(item, failureDescription);
        }
        else
        {
            if (!_completed)
            {
                // Put it back to Pending
                PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
            }
            else
            {
                // Reload the record from the database because referenced entities have been modified since the beginning.
                // Need to reload because we are passing the location to the rule engine.
                StorageLocation = CollectionUtils.FirstElement<StudyStorageLocation>(StudyStorageLocation.FindStorageLocations(item.ServerPartitionKey, StorageLocation.StudyInstanceUid), null);

                LogHistory();

                // Run Study / Series Rules Engine.
                var engine = new StudyRulesEngine(StorageLocation, ServerPartition);
                engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);

                // Log the FilesystemQueue related entries
                StorageLocation.LogFilesystemQueue();

                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                Platform.Log(LogLevel.Info, "Completed reprocessing of study {0} on partition {1}",
                             StorageLocation.StudyInstanceUid, ServerPartition.Description);
            }
        }
    }
}
/// <summary>
/// Perform the edit.
/// </summary>
/// <param name="actionXml">A serialized XML representation of <see cref="SetTagCommand"/> objects</param>
/// <returns><b>true</b> if the edit was applied; <b>false</b> if the command processor
/// failed (the failure reason is stored in <c>FailureReason</c>).</returns>
public bool Edit(XmlElement actionXml)
{
    Platform.Log(LogLevel.Info, "Starting Edit of study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                 Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                 Study.AccessionNumber, ServerPartition.Description);

    LoadExtensions();

    EditStudyWorkQueueDataParser parser = new EditStudyWorkQueueDataParser();
    EditStudyWorkQueueData data = parser.Parse(actionXml);

    using (ServerCommandProcessor processor = new ServerCommandProcessor("Web Edit Study"))
    {
        // Convert UpdateItem in the request into BaseImageLevelUpdateCommand
        List<BaseImageLevelUpdateCommand> updateCommands = null;
        if (data != null)
        {
            updateCommands = CollectionUtils.Map<Edit.UpdateItem, BaseImageLevelUpdateCommand>(
                data.EditRequest.UpdateEntries,
                delegate(Edit.UpdateItem item)
                {
                    // Note: For edit, we assume each UpdateItem is equivalent to SetTagCommand
                    return(new SetTagCommand(item.DicomTag.TagValue, item.OriginalValue, item.Value));
                }
                );
        }

        UpdateStudyCommand updateStudyCommand = new UpdateStudyCommand(ServerPartition, StorageLocation, updateCommands, ServerRuleApplyTimeEnum.SopEdited);
        processor.AddCommand(updateStudyCommand);

        // Note, this command will only insert the ArchiveQueue command if a delete doesn't exist
        processor.AddCommand(new InsertArchiveQueueCommand(ServerPartition.Key, StorageLocation.Key));

        // NOTE(review): data is dereferenced unconditionally here although it was null-checked
        // above; confirm Parse can never return null, otherwise this throws.
        var context = new WebEditStudyContext
        {
            CommandProcessor = processor,
            EditType = data.EditRequest.EditType,
            OriginalStudyStorageLocation = StorageLocation,
            EditCommands = updateCommands,
            OriginalStudy = Study,
            OrginalPatient = Patient,
            UserId = data.EditRequest.UserId,
            Reason = data.EditRequest.Reason
        };

        OnStudyUpdating(context);

        if (!processor.Execute())
        {
            Platform.Log(LogLevel.Error, processor.FailureException, "Unexpected failure editing study: {0}",
                         processor.FailureReason);
            FailureReason = processor.FailureReason;
            return(false);
        }

        // reload the StudyStorageLocation
        NewStorageLocation = StudyStorageLocation.FindStorageLocations(StorageLocation.StudyStorage)[0];
        context.NewStudystorageLocation = NewStorageLocation;

        OnStudyUpdated(context);

        if (updateStudyCommand.Statistics != null)
        {
            StatisticsLogger.Log(LogLevel.Info, updateStudyCommand.Statistics);
        }

        return(true);
    }
}