/// <summary>
/// Creates an instance of <see cref="UpdateSeriesCommand"/> to update the existing Series record in the database.
/// </summary>
public UpdateSeriesCommand(StudyStorageLocation storageLocation, ServerActionContext sopContext)
    : base(String.Concat("Update Series Command"))
{
    _storageLocation = storageLocation;
    _data = sopContext.Message.DataSet;
    _context = sopContext;
}
/// <summary>
/// Retrieves the storage location from the database for the specified study storage key. Checks if the filesystem is online.
/// </summary>
/// <param name="studyStorageKey"></param>
/// <param name="location"></param>
/// <returns></returns>
public bool GetWritableStudyStorageLocation(ServerEntityKey studyStorageKey, out StudyStorageLocation location)
{
    // NOTE: THIS METHOD SHOULD NOT LOAD THE RECORD FROM THE CACHE
    using (ServerExecutionContext context = new ServerExecutionContext())
    {
        IQueryStudyStorageLocation procedure = context.ReadContext.GetBroker<IQueryStudyStorageLocation>();
        StudyStorageLocationQueryParameters parms = new StudyStorageLocationQueryParameters
        {
            StudyStorageKey = studyStorageKey
        };
        IList<StudyStorageLocation> locationList = procedure.Find(parms);
        foreach (StudyStorageLocation studyLocation in locationList)
        {
            string reason;
            if (CheckFilesystemOnline(studyLocation.FilesystemKey, out reason))
            {
                location = studyLocation;
                return true;
            }
        }

        // TODO: throw new FilesystemIsNotWritableException();
        location = null;
        return false;
    }
}
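A minimal sketch of how a caller might consume the out parameter above. It assumes the call is made from the same class that declares GetWritableStudyStorageLocation and that studyStorageKey is already in scope; the variable names are illustrative only.

// Illustrative only: assumes this runs inside the declaring class with studyStorageKey available.
StudyStorageLocation writableLocation;
if (GetWritableStudyStorageLocation(studyStorageKey, out writableLocation))
{
    Platform.Log(LogLevel.Info, "Writable study path: {0}", writableLocation.GetStudyPath());
}
else
{
    Platform.Log(LogLevel.Warn, "No writable filesystem is currently online for this study.");
}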
protected string GetReconcileUidPath(WorkQueueUid sop)
{
    // In 2.0: Path = \\Filesystem Path\Reconcile\GroupID\StudyInstanceUid\*.dcm
    WorkQueue workqueueItem = Context.WorkQueueItem;
    if (!String.IsNullOrEmpty(workqueueItem.GroupID))
    {
        StudyStorageLocation storageLocation = Context.WorkQueueItemStudyStorage;
        string path = Path.Combine(storageLocation.FilesystemPath, storageLocation.PartitionFolder);
        path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
        path = Path.Combine(path, workqueueItem.GroupID);
        path = Path.Combine(path, storageLocation.StudyInstanceUid);
        path = Path.Combine(path, sop.SopInstanceUid + ServerPlatform.DicomFileExtension);
        return path;
    }
    else
    {
        #region BACKWARD-COMPATIBLE CODE
        // 1.5 SP1, RelativePath is NOT populated for Reconcile Study entry
        // Path = \\Filesystem Path\Reconcile\GUID\*.dcm
        // where \\Filesystem Path\Reconcile\GUID = Context.ReconcileWorkQueueData.StoragePath
        if (String.IsNullOrEmpty(sop.RelativePath))
        {
            string path = Context.ReconcileWorkQueueData.StoragePath;
            path = Path.Combine(path, sop.SopInstanceUid + ServerPlatform.DicomFileExtension);
            return path;
        }

        // will this ever happen?
        return Path.Combine(Context.ReconcileWorkQueueData.StoragePath, sop.RelativePath);
        #endregion
    }
}
private DicomProcessingResult HandleDuplicateFile(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor,
                                                  DicomMessageBase message, string sourceFilename, StudyProcessWorkQueueData data)
{
    Study study = studyLocation.Study ?? studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);
    if (study != null)
    {
        Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (A#:{1} StudyUid:{2} Patient: {3} ID:{4})",
                     sopInstanceUid, study.AccessionNumber, study.StudyInstanceUid, study.PatientsName, study.PatientId);
    }
    else
    {
        Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                     sopInstanceUid, studyLocation.StudyInstanceUid);
    }

    var sopProcessingContext = new SopInstanceProcessorContext(commandProcessor, studyLocation, _context.ContextID, _context.Request)
    {
        DuplicateProcessing = _context.DuplicateProcessing
    };
    DicomProcessingResult result = DuplicateSopProcessorHelper.Process(sopProcessingContext, message, data, sourceFilename);
    return result;
}
/// <summary>
/// Validates the state of the study.
/// </summary>
/// <param name="context">Name of the application</param>
/// <param name="studyStorage">The study to validate</param>
/// <param name="modes">Specifying what validation to execute</param>
public void ValidateStudyState(String context, StudyStorageLocation studyStorage, StudyIntegrityValidationModes modes)
{
    Platform.CheckForNullReference(studyStorage, "studyStorage");
    if (modes == StudyIntegrityValidationModes.None)
    {
        return;
    }

    using (ServerExecutionContext scope = new ServerExecutionContext())
    {
        Study study = studyStorage.LoadStudy(scope.PersistenceContext);
        if (study != null)
        {
            StudyXml studyXml = studyStorage.LoadStudyXml();
            if (modes == StudyIntegrityValidationModes.Default ||
                (modes & StudyIntegrityValidationModes.InstanceCount) == StudyIntegrityValidationModes.InstanceCount)
            {
                if (studyXml != null && studyXml.NumberOfStudyRelatedInstances != study.NumberOfStudyRelatedInstances)
                {
                    ValidationStudyInfo validationStudyInfo = new ValidationStudyInfo(study, studyStorage.ServerPartition);
                    throw new StudyIntegrityValidationFailure(
                        ValidationErrors.InconsistentObjectCount, validationStudyInfo,
                        String.Format("Number of instances in database and xml do not match: {0} vs {1}.",
                                      study.NumberOfStudyRelatedInstances, studyXml.NumberOfStudyRelatedInstances));
                }
            }
        }
    }
}
public UpdateStudySizeInDBCommand(StudyStorageLocation location, RebuildStudyXmlCommand rebuildCommand)
    : base("Update Study Size In DB")
{
    _location = location;
    _rebuildCommand = rebuildCommand;
}
/// <summary>
/// Create Duplicate SIQ Entry
/// </summary>
/// <param name="file"></param>
/// <param name="location"></param>
/// <param name="sourcePath"></param>
/// <param name="queue"></param>
/// <param name="uid"></param>
public static void CreateDuplicateSIQEntry(DicomFile file, StudyStorageLocation location, string sourcePath, WorkQueue queue, WorkQueueUid uid)
{
    Platform.Log(LogLevel.Info, "Creating Work Queue Entry for duplicate...");
    String uidGroup = queue.GroupID ?? queue.GetKey().Key.ToString();

    using (var commandProcessor = new ServerCommandProcessor("Insert Work Queue entry for duplicate"))
    {
        commandProcessor.AddCommand(new FileDeleteCommand(sourcePath, true));

        var sopProcessingContext = new SopProcessingContext(commandProcessor, location, uidGroup);
        DicomProcessingResult result = Process(sopProcessingContext, file);
        if (!result.Successful)
        {
            FailUid(uid, true);
            return;
        }

        commandProcessor.AddCommand(new DeleteWorkQueueUidCommand(uid));

        if (!commandProcessor.Execute())
        {
            Platform.Log(LogLevel.Error, "Unexpected error when creating duplicate study integrity queue entry: {0}",
                         commandProcessor.FailureReason);
            FailUid(uid, true);
        }
    }
}
public void Insert(StudyStorageLocation storageLocation, string studyInstanceUid)
{
    lock (_cache)
    {
        _cache.Add(studyInstanceUid, storageLocation, null, Cache.NoAbsoluteExpiration, _retentionTime, CacheItemPriority.Normal, null);
    }
}
private void checkBoxLoadTest_CheckedChanged(object sender, EventArgs e)
{
    try
    {
        WorkQueueTypeEnum t = WorkQueueTypeEnum.CompressStudy;

        using (IReadContext read = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
        {
            IInsertStudyStorage insert = read.GetBroker<IInsertStudyStorage>();
            InsertStudyStorageParameters criteria = new InsertStudyStorageParameters();
            criteria.StudyInstanceUid = "1.2.3.4";
            criteria.FilesystemKey = FilesystemMonitor.Instance.GetFilesystems().GetEnumerator().Current.Filesystem.GetKey();
            criteria.Folder = "20070101";
            criteria.StudyStatusEnum = StudyStatusEnum.Online;
            criteria.QueueStudyStateEnum = QueueStudyStateEnum.Idle;
            IList<StudyStorageLocation> storage = insert.Find(criteria);
            StudyStorageLocation storageEntry = storage[0];
        }
    }
    catch (Exception x)
    {
        Platform.Log(LogLevel.Error, x);
    }
}
public RemoveInstanceFromStudyXmlCommand(StudyStorageLocation location, StudyXml studyXml, DicomFile file)
    : base("Remove Instance From Study Xml", true)
{
    _studyLocation = location;
    _file = file;
    _studyXml = studyXml;
}
private void DoStudyLevelValidation(StudyStorageLocation storageLocation, StudyXml studyXml, Study study, ServerPartition partition)
{
    int xmlNumInstances = studyXml.NumberOfStudyRelatedInstances;
    int xmlNumSeries = studyXml.NumberOfStudyRelatedSeries;

    if (study.NumberOfStudyRelatedInstances != xmlNumInstances)
    {
        throw new StudyIntegrityValidationFailure(
            ValidationErrors.InconsistentObjectCount, new ValidationStudyInfo(study, partition),
            String.Format("Number of study related instances in the database ({0}) does not match the number of instances in the study xml ({1})",
                          study.NumberOfStudyRelatedInstances, xmlNumInstances));
    }

    if (study.NumberOfStudyRelatedSeries != xmlNumSeries)
    {
        throw new StudyIntegrityValidationFailure(
            ValidationErrors.InconsistentObjectCount, new ValidationStudyInfo(study, partition),
            String.Format("Number of study related series in the database ({0}) does not match the number of series in the xml ({1})",
                          study.NumberOfStudyRelatedSeries, xmlNumSeries));
    }

    long dirFileCount = DirectoryUtility.Count(storageLocation.GetStudyPath(), "*" + ServerPlatform.DicomFileExtension, true, null);
    if (xmlNumInstances != dirFileCount)
    {
        throw new StudyIntegrityValidationFailure(
            ValidationErrors.InconsistentObjectCount, new ValidationStudyInfo(study, partition),
            String.Format("Number of instances in the xml ({0}) does not match the number of images in the filesystem ({1})",
                          xmlNumInstances, dirFileCount));
    }
}
protected string[] GetFoldersToRemove()
{
    List<string> folders = new List<string>();

    Model.WorkQueue workqueueItem = Context.WorkQueueItem;

    // Path = \\Filesystem Path\Reconcile\GroupID\StudyInstanceUid\*.dcm
    if (!String.IsNullOrEmpty(workqueueItem.GroupID))
    {
        StudyStorageLocation storageLocation = Context.WorkQueueItemStudyStorage;

        string path = Path.Combine(storageLocation.FilesystemPath, storageLocation.PartitionFolder);
        path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
        path = Path.Combine(path, workqueueItem.GroupID);

        string studyFolderPath = Path.Combine(path, storageLocation.StudyInstanceUid);

        folders.Add(studyFolderPath);
        folders.Add(path);
    }
    else
    {
        #region BACKWARD-COMPATIBLE CODE
        string path = Context.ReconcileWorkQueueData.StoragePath;
        folders.Add(path);
        #endregion
    }

    return folders.ToArray();
}
public UpdateHistorySeriesMappingCommand(StudyHistory studyHistory, StudyStorageLocation destStudy, UidMapper map)
    : base("Update Study History Series Mapping")
{
    _map = map;
    _studyHistory = studyHistory;
    _destStudy = destStudy;
}
/// <summary>
/// Load the first instance from the first series of the StudyXml file for a study.
/// </summary>
/// <param name="location">The storage location of the study.</param>
/// <returns></returns>
protected static DicomFile LoadInstance(StudyStorageLocation location)
{
    string studyXml = Path.Combine(location.GetStudyPath(), location.StudyInstanceUid + ".xml");

    if (!File.Exists(studyXml))
    {
        return null;
    }

    FileStream stream = FileStreamOpener.OpenForRead(studyXml, FileMode.Open);
    var theDoc = new XmlDocument();
    StudyXmlIo.Read(theDoc, stream);
    stream.Close();
    stream.Dispose();

    var xml = new StudyXml();
    xml.SetMemento(theDoc);

    IEnumerator<SeriesXml> seriesEnumerator = xml.GetEnumerator();
    if (seriesEnumerator.MoveNext())
    {
        SeriesXml seriesXml = seriesEnumerator.Current;
        IEnumerator<InstanceXml> instanceEnumerator = seriesXml.GetEnumerator();
        if (instanceEnumerator.MoveNext())
        {
            InstanceXml instance = instanceEnumerator.Current;
            var file = new DicomFile("file.dcm", new DicomAttributeCollection(), instance.Collection)
            {
                TransferSyntax = instance.TransferSyntax
            };
            return file;
        }
    }

    return null;
}
public StudyRulesEngine(ServerRulesEngine studyRulesEngine, StudyStorageLocation location, ServerPartition partition, StudyXml studyXml)
{
    _studyRulesEngine = studyRulesEngine;
    _studyXml = studyXml;
    _location = location;
    _partition = partition ?? ServerPartition.Load(_location.ServerPartitionKey);
}
private void ValidateSeries(StudyStorageLocation location, Series series, SeriesXml seriesXml)
{
    Study study = location.Study;
    ServerPartition partition = location.ServerPartition;

    if (seriesXml == null)
    {
        throw new StudyIntegrityValidationFailure(
            ValidationErrors.InconsistentObjectCount, new ValidationStudyInfo(study, partition),
            String.Format("Series {0} exists in the database but not in the study xml", series.SeriesInstanceUid));
    }

    if (series.NumberOfSeriesRelatedInstances != seriesXml.NumberOfSeriesRelatedInstances)
    {
        throw new StudyIntegrityValidationFailure(
            ValidationErrors.InconsistentObjectCount, new ValidationStudyInfo(study, partition),
            String.Format("Number of series related instances in the database and xml for series {0} do not match: {1} vs {2}",
                          series.SeriesInstanceUid, series.NumberOfSeriesRelatedInstances, seriesXml.NumberOfSeriesRelatedInstances));
    }

    long seriesImageCount = DirectoryUtility.Count(location.GetSeriesPath(series.SeriesInstanceUid), "*" + ServerPlatform.DicomFileExtension, true, null);
    if (seriesXml.NumberOfSeriesRelatedInstances != seriesImageCount)
    {
        throw new StudyIntegrityValidationFailure(
            ValidationErrors.InconsistentObjectCount, new ValidationStudyInfo(study, partition),
            String.Format("Number of series related instances in the xml for series {0} does not match the number of images in the series folder: {1} vs {2}",
                          series.SeriesInstanceUid, seriesXml.NumberOfSeriesRelatedInstances, seriesImageCount));
    }
}
public UpdateInstanceCountCommand(StudyStorageLocation studyLocation, string seriesInstanceUid, string sopInstanceUid)
    : base("Update Study Count")
{
    _studyLocation = studyLocation;
    _seriesInstanceUid = seriesInstanceUid;
    _sopInstanceUid = sopInstanceUid;
}
private static void RemoveStudyStorage(StudyStorageLocation location)
{
    // NOTE: This was an IUpdateContext, however, it was modified to be an IReadContext
    // after having problems w/ locks on a system with a fair amount of load. The
    // updates are just automatically committed within the stored procedure when it
    // runs...
    using (IReadContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
    {
        // Setup the delete parameters
        DeleteStudyStorageParameters parms = new DeleteStudyStorageParameters
        {
            ServerPartitionKey = location.ServerPartitionKey,
            StudyStorageKey = location.Key
        };

        // Get the Delete StudyStorage broker and do the delete
        IDeleteStudyStorage delete = updateContext.GetBroker<IDeleteStudyStorage>();
        if (false == delete.Execute(parms))
        {
            Platform.Log(LogLevel.Error, "Unexpected error when trying to delete study: {0}", location.StudyInstanceUid);
        }
    }
}
public override void DataBind()
{
    ExistingPatientSeriesGridView.DataSource = ReconcileDetails.ExistingStudy.Series;
    ConflictingPatientSeriesGridView.DataSource = ReconcileDetails.ConflictingStudyInfo.Series;

    StudyStorage storage = StudyStorage.Load(HttpContextData.Current.ReadContext, StudyIntegrityQueueItem.StudyStorageKey);
    IList<StudyStorageLocation> studyLocations = StudyStorageLocation.FindStorageLocations(storage);
    StudyStorageLocation location = studyLocations[0];
    StudyLocation.Text = location.GetStudyPath();

    ConflictingStudyLocation.Text = ReconcileDetails != null ? ReconcileDetails.GetFolderPath() : SR.NotSpecified;

    string reason;
    CanReconcile = _controller.CanReconcile(location, out reason);
    MessagePanel.Visible = !CanReconcile;
    AlertMessage.Text = reason;
    OKButton.Enabled = CanReconcile;
    OptionRow.Visible = CanReconcile;
    base.DataBind();
}
/// <summary>
/// Inserts a move request to move one or more Sops in a study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="partition">The <see cref="ServerPartition"/> where the study resides</param>
/// <param name="studyInstanceUid">The Study Instance Uid of the study</param>
/// <param name="deviceKey">The Key of the device to move the series to.</param>
/// <param name="seriesInstanceUid">The Series Instance Uid of the series to be moved.</param>
/// <param name="sopInstanceUids"></param>
/// <param name="externalRequest">Optional <see cref="ExternalRequestQueue"/> entry that triggered this move</param>
/// <returns>A MoveSeries <see cref="WorkQueue"/> entry inserted into the system.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
public static IList<WorkQueue> MoveInstance(IUpdateContext context, ServerPartition partition, string studyInstanceUid, string seriesInstanceUid,
                                            ServerEntityKey deviceKey, List<string> sopInstanceUids, ExternalRequestQueue externalRequest = null)
{
    // Find all locations of the study in the system and insert a move request for each
    IList<StudyStorageLocation> storageLocations = StudyStorageLocation.FindStorageLocations(partition.Key, studyInstanceUid);
    IList<WorkQueue> entries = new List<WorkQueue>();
    foreach (StudyStorageLocation location in storageLocations)
    {
        try
        {
            // insert a move series request
            WorkQueue request = InsertMoveInstanceRequest(context, location, seriesInstanceUid, sopInstanceUids, deviceKey, externalRequest);
            Debug.Assert(request.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.WebMoveStudy));
            entries.Add(request);
        }
        catch (Exception ex)
        {
            Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert sop level move request");
            if (!ServerHelper.UnlockStudy(location.Key))
            {
                throw new ApplicationException("Unable to unlock the study");
            }
        }
    }

    return entries;
}
private static IList<StudyHistory> FindReconcileHistories(StudyStorageLocation storageLocation, DicomMessageBase file)
{
    ImageSetDescriptor fileDesc = new ImageSetDescriptor(file.DataSet);

    List<StudyHistory> studyHistoryList = new List<StudyHistory>(
        ServerHelper.FindStudyHistories(storageLocation.StudyStorage, new[] { StudyHistoryTypeEnum.StudyReconciled }));

    IList<StudyHistory> reconcileHistories = studyHistoryList.FindAll(
        delegate(StudyHistory item)
        {
            ImageSetDescriptor desc = XmlUtils.Deserialize<ImageSetDescriptor>(item.StudyData.DocumentElement);
            return desc.Equals(fileDesc);
        });

    if (reconcileHistories.Count == 0)
    {
        // no history found in cache... reload the list and search again one more time
        studyHistoryList = new List<StudyHistory>(
            ServerHelper.FindStudyHistories(storageLocation.StudyStorage, new[] { StudyHistoryTypeEnum.StudyReconciled }));

        reconcileHistories = studyHistoryList.FindAll(
            delegate(StudyHistory item)
            {
                ImageSetDescriptor desc = XmlUtils.Deserialize<ImageSetDescriptor>(item.StudyData.DocumentElement);
                return desc.Equals(fileDesc);
            });
    }

    return reconcileHistories;
}
/// <summary>
/// Returns the path to the Reconcile folder for the Partition on the same filesystem as <see cref="location"/>, where the study is stored.
/// This is usually \\filesystemPath\PartitionFolder\RECONCILE
/// </summary>
/// <returns></returns>
public static string GetReconcileRootPath(this StudyStorageLocation location)
{
    string path = Path.Combine(location.FilesystemPath, location.PartitionFolder);
    path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
    return path;
}
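A minimal usage sketch for this extension method. It assumes "location" is an already-loaded StudyStorageLocation and "groupId" is the GroupID of the work queue item being reconciled; the Path.Combine steps simply mirror the group/study folder layout shown in GetReconcileUidPath above.

// Illustrative only: "location" and "groupId" are assumed to be in scope.
string reconcileRoot = location.GetReconcileRootPath();                    // \\filesystemPath\PartitionFolder\RECONCILE
string groupFolder = Path.Combine(reconcileRoot, groupId);                 // ...\RECONCILE\GroupID
string studyFolder = Path.Combine(groupFolder, location.StudyInstanceUid); // ...\GroupID\StudyInstanceUid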
public string GetFolderPath()
{
    if (_location == null)
    {
        if (_studyStorage == null)
        {
            using (IReadContext context = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
            {
                _studyStorage = StudyStorage.Load(context, _item.StudyStorageKey);
            }
        }

        _location = StudyStorageLocation.FindStorageLocations(_studyStorage)[0];
    }

    String path = Path.Combine(_location.FilesystemPath, _location.PartitionFolder);
    path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);

    if (!string.IsNullOrEmpty(_item.GroupID))
    {
        path = Path.Combine(path, _item.GroupID);
    }

    path = Path.Combine(path, _location.StudyInstanceUid);
    return path;
}
/// <summary>
/// Execute the insert.
/// </summary>
/// <param name="theProcessor">The command processor calling us</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    var locInsert = updateContext.GetBroker<IInsertStudyStorage>();
    var insertParms = new InsertStudyStorageParameters
    {
        ServerPartitionKey = _serverPartitionKey,
        StudyInstanceUid = _studyInstanceUid,
        Folder = _folder,
        FilesystemKey = _filesystemKey,
        QueueStudyStateEnum = QueueStudyStateEnum.Idle
    };

    if (_transfersyntax.LosslessCompressed)
    {
        insertParms.TransferSyntaxUid = _transfersyntax.UidString;
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossless;
    }
    else if (_transfersyntax.LossyCompressed)
    {
        insertParms.TransferSyntaxUid = _transfersyntax.UidString;
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossy;
    }
    else
    {
        insertParms.TransferSyntaxUid = _transfersyntax.UidString;
        insertParms.StudyStatusEnum = StudyStatusEnum.Online;
    }

    // Find one so we don't uselessly process all the results.
    _location = locInsert.FindOne(insertParms);
}
/// <summary>
/// Constructor
/// </summary>
public SeriesSopUpdateCommand(StudyStorageLocation originalStudy, StudyStorageLocation targetStudy, UidMapper uidMapper)
    : base("SeriesSopUpdateCommand")
{
    _originalStudy = originalStudy;
    _targetStudy = targetStudy;
    _uidMapper = uidMapper;
}
protected override string GetTemporaryPath()
{
    StudyStorageLocation storage = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(_item.StudyStorageKey))[0];
    if (storage == null)
    {
        return base.GetTemporaryPath();
    }
    else
    {
        String basePath = GetTempPathRoot();
        if (String.IsNullOrEmpty(basePath))
        {
            basePath = Path.Combine(storage.FilesystemPath, "temp");
        }

        String tempDirectory = Path.Combine(basePath, String.Format("ArchiveQueue-{0}", _item.GetKey()));
        for (int i = 2; i < 1000; i++)
        {
            if (!Directory.Exists(tempDirectory))
            {
                break;
            }

            tempDirectory = Path.Combine(basePath, String.Format("ArchiveQueue-{0}({1})", _item.GetKey(), i));
        }

        if (!Directory.Exists(tempDirectory))
        {
            Directory.CreateDirectory(tempDirectory);
        }

        return tempDirectory;
    }
}
/// <summary>
/// Inserts delete request(s) to delete a series in a study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="partition">The <see cref="ServerPartition"/> where the study resides</param>
/// <param name="studyInstanceUid">The Study Instance Uid of the study</param>
/// <param name="seriesInstanceUids">The Series Instance Uids of the series to be deleted.</param>
/// <param name="reason">The reason for deleting the series.</param>
/// <returns>A list of DeleteSeries <see cref="WorkQueue"/> entries inserted into the system.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
public static IList<WorkQueue> DeleteSeries(IUpdateContext context, ServerPartition partition, string studyInstanceUid, List<string> seriesInstanceUids, string reason)
{
    // Find all locations of the study in the system and insert a series delete request for each
    IList<StudyStorageLocation> storageLocations = StudyStorageLocation.FindStorageLocations(partition.Key, studyInstanceUid);
    IList<WorkQueue> entries = new List<WorkQueue>();
    foreach (StudyStorageLocation location in storageLocations)
    {
        try
        {
            string failureReason;
            if (ServerHelper.LockStudy(location.Key, QueueStudyStateEnum.WebDeleteScheduled, out failureReason))
            {
                // insert a delete series request
                WorkQueue request = InsertDeleteSeriesRequest(context, location, seriesInstanceUids, reason);
                Debug.Assert(request.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.WebDeleteStudy));
                entries.Add(request);
            }
            else
            {
                throw new ApplicationException(String.Format("Unable to lock storage location {0} for deletion : {1}", location.Key, failureReason));
            }
        }
        catch (Exception ex)
        {
            Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert delete request");
            if (!ServerHelper.UnlockStudy(location.Key))
            {
                throw new ApplicationException("Unable to unlock the study");
            }
        }
    }

    return entries;
}
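A sketch of how a caller might invoke DeleteSeries. The declaring class name (shown here as StudyEditorHelper), the use of OpenUpdateContext/Commit to obtain and commit the IUpdateContext, and the partition/UID variables are assumptions for illustration rather than details taken from the snippet above.

// Illustrative only: "partition", "studyInstanceUid" and "seriesInstanceUid" are assumed to be in scope,
// and StudyEditorHelper stands in for the (unshown) class that declares DeleteSeries.
using (IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
{
    IList<WorkQueue> requests = StudyEditorHelper.DeleteSeries(update, partition, studyInstanceUid,
                                                               new List<string> { seriesInstanceUid },
                                                               "Series deleted by user request");
    update.Commit();
    Platform.Log(LogLevel.Info, "Inserted {0} WebDeleteStudy request(s).", requests.Count);
}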
/// <summary>
/// Inserts a MoveSopInstance work queue entry
/// </summary>
/// <param name="context"></param>
/// <param name="location"></param>
/// <param name="seriesInstanceUid"></param>
/// <param name="sopInstanceUids"></param>
/// <param name="deviceKey"></param>
/// <param name="externalRequest"></param>
/// <exception cref="ApplicationException">If the Move Sop Instance Work Queue entry cannot be inserted.</exception>
private static WorkQueue InsertMoveInstanceRequest(IUpdateContext context, StudyStorageLocation location, string seriesInstanceUid,
                                                   IEnumerable<string> sopInstanceUids, ServerEntityKey deviceKey, ExternalRequestQueue externalRequest)
{
    // Create a work queue entry and append the series instance uid into the WorkQueueUid table
    WorkQueue moveSopInstance = null;
    var broker = context.GetBroker<IInsertWorkQueue>();

    foreach (string sop in sopInstanceUids)
    {
        InsertWorkQueueParameters criteria = new MoveInstanceWorkQueueParameters(location, seriesInstanceUid, sop, deviceKey);
        if (externalRequest != null)
        {
            criteria.ExternalRequestQueueKey = externalRequest.Key;
        }

        moveSopInstance = broker.FindOne(criteria);
        if (moveSopInstance == null)
        {
            throw new ApplicationException(
                String.Format("Unable to insert a Move Sop Instance request for study {0}", location.StudyInstanceUid));
        }
    }

    return moveSopInstance;
}
/// <summary>
/// Helper method to return the path to the duplicate image (in the Reconcile folder)
/// </summary>
/// <param name="studyStorage"></param>
/// <param name="sop"></param>
/// <returns></returns>
public static String GetDuplicateUidPath(StudyStorageLocation studyStorage, WorkQueueUid sop)
{
    string dupPath = GetDuplicateGroupPath(studyStorage, sop);
    dupPath = string.IsNullOrEmpty(sop.RelativePath)
                  ? Path.Combine(dupPath, Path.Combine(studyStorage.StudyInstanceUid, sop.SopInstanceUid + "." + sop.Extension))
                  : Path.Combine(dupPath, sop.RelativePath);

    #region BACKWARD_COMPATIBILITY_CODE
    if (string.IsNullOrEmpty(sop.RelativePath) && !File.Exists(dupPath))
    {
        string basePath = Path.Combine(studyStorage.GetStudyPath(), sop.SeriesInstanceUid);
        basePath = Path.Combine(basePath, sop.SopInstanceUid);
        if (sop.Extension != null)
        {
            dupPath = basePath + "." + sop.Extension;
        }
        else
        {
            dupPath = basePath + ".dcm";
        }
    }
    #endregion

    return dupPath;
}
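A small sketch of calling this helper. The class that declares GetDuplicateUidPath is not shown in the snippet, so the unqualified call assumes the sketch lives in that same class; "studyStorage" and "sop" are assumed to be already loaded.

// Illustrative only: assumes this runs in the class declaring GetDuplicateUidPath.
string duplicatePath = GetDuplicateUidPath(studyStorage, sop);
if (!File.Exists(duplicatePath))
{
    Platform.Log(LogLevel.Warn, "Duplicate file not found at {0}", duplicatePath);
}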
private static WorkQueueDetails CreateGeneralWorkQueueItemDetails(Model.WorkQueue item)
{
    var detail = new WorkQueueDetails();

    detail.Key = item.Key;
    detail.ScheduledDateTime = item.ScheduledTime;
    detail.ExpirationTime = item.ExpirationTime;
    detail.InsertTime = item.InsertTime;
    detail.FailureCount = item.FailureCount;
    detail.Type = item.WorkQueueTypeEnum;
    detail.Status = item.WorkQueueStatusEnum;
    detail.Priority = item.WorkQueuePriorityEnum;
    detail.FailureDescription = item.FailureDescription;
    detail.ServerDescription = item.ProcessorID;

    StudyStorageLocation storage = WorkQueueController.GetLoadStorageLocation(item);
    detail.StorageLocationPath = storage.GetStudyPath();

    // Fetch UIDs
    var wqUidsAdaptor = new WorkQueueUidAdaptor();
    var uidCriteria = new WorkQueueUidSelectCriteria();
    uidCriteria.WorkQueueKey.EqualTo(item.GetKey());
    IList<WorkQueueUid> uids = wqUidsAdaptor.Get(uidCriteria);

    var mapSeries = new Hashtable();
    foreach (WorkQueueUid uid in uids)
    {
        if (mapSeries.ContainsKey(uid.SeriesInstanceUid) == false)
        {
            mapSeries.Add(uid.SeriesInstanceUid, uid.SopInstanceUid);
        }
    }

    detail.NumInstancesPending = uids.Count;
    detail.NumSeriesPending = mapSeries.Count;

    // Fetch the study and patient info
    var ssAdaptor = new StudyStorageAdaptor();
    StudyStorage storages = ssAdaptor.Get(item.StudyStorageKey);

    var studyAdaptor = new StudyAdaptor();
    var studycriteria = new StudySelectCriteria();
    studycriteria.StudyInstanceUid.EqualTo(storages.StudyInstanceUid);
    studycriteria.ServerPartitionKey.EqualTo(item.ServerPartitionKey);
    Study study = studyAdaptor.GetFirst(studycriteria);

    // Study may not be available until the images are processed.
    if (study != null)
    {
        var studyAssembler = new StudyDetailsAssembler();
        detail.Study = studyAssembler.CreateStudyDetail(study);
    }

    return detail;
}
/// <summary> /// Creates an instance of <see cref="ReconcileStorage"/> /// </summary> /// <param name="studyLocation">The <see cref="StudyStorageLocation"/> of the study which contains the images to be reconciled.</param> /// <param name="folder">The name of the folder used for storing the images to be reconciled.</param> public ReconcileStorage(StudyStorageLocation studyLocation, string folder) { Platform.CheckForNullReference(studyLocation, "studyLocation"); Platform.CheckForEmptyString(folder, "folder"); _studyLocation = studyLocation; _folder = folder; }
public RemoveInstanceFromStudyXmlCommand(StudyStorageLocation location, StudyXml studyXml, string seriesInstanceUid, string sopInstanceUid)
    : base("RemoveInstanceFromStudyXmlCommand", true)
{
    _studyLocation = location;
    _seriesInstanceUid = seriesInstanceUid;
    _sopInstanceUid = sopInstanceUid;
    _studyXml = studyXml;
}
/// <summary> /// Creates an instance of <see cref="SopInstanceProcessorContext"/> /// </summary> /// <param name="commandProcessor">The <see cref="ServerCommandProcessor"/> used in the context</param> /// <param name="studyLocation">The <see cref="StudyStorageLocation"/> of the study being processed</param> /// <param name="uidGroup">A String value respresenting the group of SOP instances which are being processed.</param> /// <param name="request">An external request that may have triggered this item.</param> public SopInstanceProcessorContext(ServerCommandProcessor commandProcessor, StudyStorageLocation studyLocation, string uidGroup, ExternalRequestQueue request = null) { _commandProcessor = commandProcessor; _studyLocation = studyLocation; _group = uidGroup; _request = request; }
public SeriesPrefetch(StudyStorageLocation storage, string seriesInstanceUid)
{
    _storage = storage;
    _seriesInstanceUid = seriesInstanceUid;

    Thread prefetchThread = new Thread(Run);
    prefetchThread.Start();
}
/// <summary>
/// Indicates whether or not the ServerPartition duplicate policy is overridden for the specified study
/// </summary>
/// <param name="studyStorageLocation"></param>
/// <returns></returns>
public static bool IsParitionDuplicatePolicyOverridden(StudyStorageLocation studyStorageLocation)
{
    var list = GetStudyUIDsWithDuplicatePolicyOverride();
    if (list != null && list.Any(uid => uid.Equals(studyStorageLocation.StudyInstanceUid)))
        return true;

    return false;
}
private void LoadStorageLocation()
{
    if (_storageLocation == null)
    {
        var studyStorage = StudyStorage.Load(HttpContext.Current.GetSharedPersistentContext(), TheStudyIntegrityQueueItem.StudyStorageKey);
        _storageLocation = StudyStorageLocation.FindStorageLocations(studyStorage)[0];
    }
}
/// <summary> /// Creates an instance of <see cref="AutoReconciler"/> to update /// a DICOM file according to the history. /// </summary> /// <param name="description"></param> /// <param name="storageLocation"></param> public BasePreprocessor(string description, StudyStorageLocation storageLocation) { Platform.CheckForEmptyString(description, "description"); Platform.CheckForNullReference(storageLocation, "storageLocation"); StorageLocation = storageLocation; Description = description; }
public UpdateInstanceCommand(ServerPartition partition, StudyStorageLocation studyLocation, DicomFile file)
    : base("Update existing SOP Instance")
{
    _partition = partition;
    _studyLocation = studyLocation;
    _file = file;
}
public InsertInstanceCommand(DicomFile file, StudyStorageLocation location)
    : base("Insert Instance into Database")
{
    Platform.CheckForNullReference(file, "Dicom File object");
    Platform.CheckForNullReference(location, "Study Storage Location");

    _file = file;
    _storageLocation = location;
}
public ArchiveStudyCommand(StudyStorageLocation storageLocation, string hsmPath, string tempPath, PartitionArchive archive)
{
    _storageLocation = storageLocation;
    _hsmPath = hsmPath;
    _tempPath = tempPath;
    _archive = archive;

    CreateSubCommands();
}
public UpdateStudySizeInDBCommand(StudyStorageLocation location)
    : base("Update Study Size In DB", true)
{
    _location = location;

    // this may take a few ms, so it's better to do it here instead of in OnExecute()
    StudyXml studyXml = _location.LoadStudyXml();
    _studySizeInKB = studyXml.GetStudySize() / KB;
}
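A sketch of how this command might be queued and run, following the AddCommand/Execute pattern shown in CreateDuplicateSIQEntry above. The processor description string and the "location" variable are illustrative, not taken from the snippet.

// Illustrative only: "location" is assumed to be a loaded StudyStorageLocation.
using (var processor = new ServerCommandProcessor("Update study size"))
{
    processor.AddCommand(new UpdateStudySizeInDBCommand(location));
    if (!processor.Execute())
    {
        Platform.Log(LogLevel.Error, "Failed to update study size: {0}", processor.FailureReason);
    }
}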
public SaveXmlCommand(StudyXml stream, StudyStorageLocation storageLocation)
    : base("Insert into Study XML", true)
{
    Platform.CheckForNullReference(stream, "StudyStream object");
    Platform.CheckForNullReference(storageLocation, "Study Storage Location");

    _stream = stream;
    _xmlPath = Path.Combine(storageLocation.GetStudyPath(), storageLocation.StudyInstanceUid + ".xml");
    _gzPath = _xmlPath + ".gz";
}
static public void Insert(StudyStorageLocation storageLocation, string studyInstanceUId, string seriesInstanceUid, string sopInstanceUid, DicomPixelData pixeldata)
{
    lock (_cache)
    {
        string key = String.Format("{0}/{1}/{2}/{3}", storageLocation.GetKey().Key, studyInstanceUId, seriesInstanceUid, sopInstanceUid);
        _cache.Add(key, pixeldata, null, Cache.NoAbsoluteExpiration, _retentionTime, CacheItemPriority.Normal, null);
    }
}
public InsertStudyXmlCommand(DicomFile file, StudyXml stream, StudyStorageLocation storageLocation)
    : base("Insert into Study XML", true)
{
    Platform.CheckForNullReference(file, "Dicom File object");
    Platform.CheckForNullReference(stream, "StudyStream object");
    Platform.CheckForNullReference(storageLocation, "Study Storage Location");

    _file = file;
    _stream = stream;
    _studyStorageLocation = storageLocation;
}
private void DoSeriesLevelValidation(StudyStorageLocation storageLocation, StudyXml studyXml, Study study)
{
    IDictionary<string, Series> seriesList = study.Series;
    foreach (var entry in seriesList)
    {
        Series series = entry.Value;
        SeriesXml seriesXml = studyXml[series.SeriesInstanceUid];
        ValidateSeries(storageLocation, series, seriesXml);
    }
}
public UpdateWorkQueueCommand(DicomMessageBase message, StudyStorageLocation location, bool duplicate, string extension, string uidGroupId)
    : base("Update/Insert a WorkQueue Entry")
{
    Platform.CheckForNullReference(message, "Dicom Message object");
    Platform.CheckForNullReference(location, "Study Storage Location");

    _message = message;
    _storageLocation = location;
    _duplicate = duplicate;
    _extension = extension;
    _uidGroupId = uidGroupId;
}
private void DetermineTargetLocation()
{
    if (Context.History.DestStudyStorageKey != null)
    {
        _destinationStudyStorage = StudyStorageLocation.FindStorageLocations(StudyStorage.Load(Context.History.DestStudyStorageKey))[0];
    }
    else
    {
        _destinationStudyStorage = Context.WorkQueueItemStudyStorage;
        Context.History.DestStudyStorageKey = _destinationStudyStorage.Key;
    }
}
public UpdateWorkQueueCommand(DicomMessageBase message, StudyStorageLocation location, bool duplicate, WorkQueueData data = null,
                              WorkQueueUidData uidData = null, ExternalRequestQueue request = null, WorkQueuePriorityEnum priority = null)
    : base("Update/Insert a WorkQueue Entry")
{
    Platform.CheckForNullReference(message, "Dicom Message object");
    Platform.CheckForNullReference(location, "Study Storage Location");

    _message = message;
    _storageLocation = location;
    _duplicate = duplicate;
    _data = data;
    _request = request;
    _uidData = uidData;
    _priority = priority;
}
public void AddCachedStudy(StudyStorageLocation theLocation)
{
    ServerCache<string, StudyStorageLocation> partitionCache;
    lock (_lock)
    {
        if (!_caches.TryGetValue(theLocation.ServerPartitionKey, out partitionCache))
        {
            partitionCache = new ServerCache<string, StudyStorageLocation>(TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(30));
            _caches.Add(theLocation.ServerPartitionKey, partitionCache);
        }
    }

    partitionCache.Add(theLocation.StudyInstanceUid, theLocation);
}
public static PixelDataManager GetInstance(StudyStorageLocation storage)
{
    string key = storage.Key.ToString();
    lock (_cache)
    {
        PixelDataManager instance = _cache[key] as PixelDataManager;
        if (instance == null)
        {
            instance = new PixelDataManager(storage);
            _cache.Add(key, instance, null, Cache.NoAbsoluteExpiration,
                       ImageStreamingServerSettings.Default.CacheRetentionWindow, CacheItemPriority.Default, UnloadPixelDataManager);
        }

        return instance;
    }
}
public UpdateStudyCommand(ServerPartition partition, StudyStorageLocation studyLocation, IList<BaseImageLevelUpdateCommand> imageLevelCommands,
                          ServerRuleApplyTimeEnum applyTime)
    : base("Update existing study")
{
    _partition = partition;
    _oldStudyLocation = studyLocation;
    _commands = imageLevelCommands;
    _statistics = new UpdateStudyStatistics(_oldStudyLocation.StudyInstanceUid);

    // Load the engine for editing rules.
    _rulesEngine = new ServerRulesEngine(applyTime, _partition.Key);
    if (applyTime.Equals(ServerRuleApplyTimeEnum.SopProcessed))
        _rulesEngine.AddIncludeType(ServerRuleTypeEnum.AutoRoute);
    _rulesEngine.Load();
}