protected override void OnExecute(ServerCommandProcessor theProcessor, IUpdateContext updateContext)
{
    Study study = _location.Study ?? Study.Find(updateContext, _location.Key);

    if (study.StudySizeInKB != _studySizeInKB)
    {
        IStudyEntityBroker broker = updateContext.GetBroker<IStudyEntityBroker>();
        StudyUpdateColumns parameters = new StudyUpdateColumns
        {
            StudySizeInKB = _studySizeInKB
        };

        if (!broker.Update(study.Key, parameters))
            throw new ApplicationException("Unable to update study size in the database");
    }
}
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    // If the study xml was rebuilt, recalculate the study size from the rebuilt xml
    if (_rebuildCommand != null)
        _studySizeInKB = _rebuildCommand.StudyXml.GetStudySize() / KB;

    Study study = _location.Study ?? Study.Find(updateContext, _location.Key);

    // Only touch the database if the stored size is out of date
    if (study != null && study.StudySizeInKB != _studySizeInKB)
    {
        var broker = updateContext.GetBroker<IStudyEntityBroker>();
        var parameters = new StudyUpdateColumns { StudySizeInKB = _studySizeInKB };

        if (!broker.Update(study.Key, parameters))
            throw new ApplicationException("Unable to update study size in the database");
    }
}
public bool DeleteOrderItem(ServerEntityKey partitionKey, ServerEntityKey orderKey)
{
    using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
    {
        // Disconnect studies from order
        var studyBroker = updateContext.GetBroker<IStudyEntityBroker>();

        var criteria = new StudySelectCriteria();
        criteria.OrderKey.EqualTo(orderKey);
        criteria.ServerPartitionKey.EqualTo(partitionKey);

        var updateColumns = new StudyUpdateColumns { OrderKey = null };
        studyBroker.Update(criteria, updateColumns);

        bool retValue = _adaptor.Delete(updateContext, orderKey);

        updateContext.Commit();

        return retValue;
    }
}
private void EnsureConsistentObjectCount(StudyXml studyXml, IDictionary<string, List<string>> processedSeriesMap)
{
    Platform.CheckForNullReference(studyXml, "studyXml");

    // We have to ensure that the counts in the studyXml and what we have processed are consistent.
    // Files or folders may have been processed earlier but gone missing by the time the entry is resumed.
    // We have to remove them from the studyXml before committing it.
    Platform.Log(LogLevel.Info, "Verifying study xml against the filesystems");

    int filesProcessed = 0;
    foreach (string seriesUid in processedSeriesMap.Keys)
    {
        filesProcessed += processedSeriesMap[seriesUid].Count;
    }

    // Used to keep track of the series to be removed.
    // We can't remove an item from the study xml while we are iterating through it.
    var seriesToRemove = new List<string>();
    foreach (SeriesXml seriesXml in studyXml)
    {
        if (!processedSeriesMap.ContainsKey(seriesXml.SeriesInstanceUid))
        {
            seriesToRemove.Add(seriesXml.SeriesInstanceUid);
        }
        else
        {
            // Check all instances in the series
            List<string> foundInstances = processedSeriesMap[seriesXml.SeriesInstanceUid];
            var instanceToRemove = new List<string>();
            foreach (InstanceXml instanceXml in seriesXml)
            {
                if (!foundInstances.Contains(instanceXml.SopInstanceUid))
                {
                    // The SOP no longer exists in the filesystem
                    instanceToRemove.Add(instanceXml.SopInstanceUid);
                }
            }

            foreach (string instanceUid in instanceToRemove)
            {
                seriesXml[instanceUid] = null;
                Platform.Log(LogLevel.Info, "Removed SOP {0} in the study xml: it no longer exists.", instanceUid);
            }
        }
    }

    foreach (string seriesUid in seriesToRemove)
    {
        studyXml[seriesUid] = null;
        Platform.Log(LogLevel.Info, "Removed Series {0} in the study xml: it no longer exists.", seriesUid);
    }

    Platform.CheckTrue(studyXml.NumberOfStudyRelatedSeries == processedSeriesMap.Count,
                       String.Format("Number of series in the xml does not match number of series reprocessed: {0} vs {1}",
                                     studyXml.NumberOfStudyRelatedSeries, processedSeriesMap.Count));

    Platform.CheckTrue(studyXml.NumberOfStudyRelatedInstances == filesProcessed,
                       String.Format("Number of instances in the xml does not match number of instances reprocessed: {0} vs {1}",
                                     studyXml.NumberOfStudyRelatedInstances, filesProcessed));

    Platform.Log(LogLevel.Info, "Study xml has been verified.");

    if (StorageLocation.Study != null)
    {
        // Update the instance count in the db
        using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
        {
            var broker = updateContext.GetBroker<IStudyEntityBroker>();
            var columns = new StudyUpdateColumns
            {
                NumberOfStudyRelatedInstances = studyXml.NumberOfStudyRelatedInstances,
                NumberOfStudyRelatedSeries = studyXml.NumberOfStudyRelatedSeries
            };
            broker.Update(StorageLocation.Study.GetKey(), columns);
            updateContext.Commit();
        }
    }
    else
    {
        // Alert on the orphaned StudyStorage entry
        RaiseAlert(WorkQueueItem, AlertLevel.Critical,
                   String.Format("Study {0} has been reprocessed but the Study record was NOT created. Images reprocessed: {1}. Path={2}",
                                 StorageLocation.StudyInstanceUid, filesProcessed, StorageLocation.GetStudyPath()));
    }
}
public bool UpdateStudy(Study study, StudyUpdateColumns columns)
{
    return _adaptor.Update(study.Key, columns);
}
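// Hedged usage sketch, not part of the original code: one way a caller might
// drive the UpdateStudy helper above. "SetStudySize" and "newSizeInKB" are
// illustrative names introduced here; only UpdateStudy, Study, Platform and
// StudyUpdateColumns come from the snippets in this section.
public void SetStudySize(Study study, decimal newSizeInKB)
{
    var columns = new StudyUpdateColumns { StudySizeInKB = newSizeInKB };
    if (!UpdateStudy(study, columns))
        Platform.Log(LogLevel.Error, "Unable to update the size of study {0}", study.StudyInstanceUid);
}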
/// <summary>
/// Called after the <see cref="WorkQueue"/> item has been processed.
/// </summary>
/// <param name="item">The work queue item which has been processed.</param>
protected virtual void OnProcessItemEnd(Model.WorkQueue item)
{
    // Update the study size
    if (Completed)
    {
        Study theStudy = Study ?? Study.Find(ServerExecutionContext.Current.ReadContext, item.StudyStorageKey);
        if (theStudy != null)
        {
            if (item.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.MigrateStudy))
                StorageLocation = CollectionUtils.FirstElement<StudyStorageLocation>(
                    StudyStorageLocation.FindStorageLocations(item.ServerPartitionKey, theStudy.StudyInstanceUid), null);

            if (File.Exists(StorageLocation.GetStudyXmlPath()))
            {
                StudyXml studyXml = StorageLocation.LoadStudyXml(true /* reload, in case it's changed */);
                var size = (decimal) (studyXml.GetStudySize() / KB);

                // Only update if it's out-of-date
                if (theStudy.StudySizeInKB != size)
                {
                    using (IUpdateContext ctx = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                    {
                        var broker = ctx.GetBroker<IStudyEntityBroker>();
                        var parameters = new StudyUpdateColumns { StudySizeInKB = size };
                        if (broker.Update(theStudy.Key, parameters))
                            ctx.Commit();
                    }
                }
            }
        }
    }
}
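// Assumed definition, for context only: the KB constant used in the size
// calculations above is not defined in these snippets. A minimal sketch,
// assuming a plain 1024 bytes-per-kilobyte conversion; the original
// declaration may differ.
private const decimal KB = 1024;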
public static Study Insert(IUpdateContext update, Study entity)
{
    var broker = update.GetBroker<IStudyEntityBroker>();
    var updateColumns = new StudyUpdateColumns
    {
        StudyInstanceUid = entity.StudyInstanceUid,
        ServerPartitionKey = entity.ServerPartitionKey,
        PatientKey = entity.PatientKey,
        NumberOfStudyRelatedSeries = entity.NumberOfStudyRelatedSeries,
        NumberOfStudyRelatedInstances = entity.NumberOfStudyRelatedInstances,
        StudySizeInKB = entity.StudySizeInKB,
        ResponsiblePerson = entity.ResponsiblePerson,
        ResponsibleOrganization = entity.ResponsibleOrganization,
        QueryXml = entity.QueryXml,
        SpecificCharacterSet = entity.SpecificCharacterSet,
        StudyStorageKey = entity.StudyStorageKey,
        PatientsName = entity.PatientsName,
        PatientId = entity.PatientId,
        IssuerOfPatientId = entity.IssuerOfPatientId,
        PatientsBirthDate = entity.PatientsBirthDate,
        PatientsAge = entity.PatientsAge,
        PatientsSex = entity.PatientsSex,
        StudyDate = entity.StudyDate,
        StudyTime = entity.StudyTime,
        AccessionNumber = entity.AccessionNumber,
        StudyId = entity.StudyId,
        StudyDescription = entity.StudyDescription,
        ReferringPhysiciansName = entity.ReferringPhysiciansName
    };

    Study newEntity = broker.Insert(updateColumns);
    return newEntity;
}
private void UpdateDatabase()
{
    var patientUpdate = new PatientUpdateColumns();
    var seriesUpdate = new SeriesUpdateColumns();
    var studyUpdate = new StudyUpdateColumns();

    // Update Patient level info. Different cases can occur here:
    //   A) Patient demographic info is not changed ==> update the current patient.
    //   B) New patient demographics match (another) existing patient in the database
    //      ==> Transfer the study to that patient. This means the study count on both patients must be updated.
    //          The current patient should also be deleted if there are no more studies attached to it after the transfer.
    //   C) New patient demographics don't match any patient in the database
    //      ==> A new patient should be created for this study. The study count on the current patient should be updated,
    //          and the patient should also be deleted if this is the only study attached to it.
    if (_patientInfoIsNotChanged)
    {
        _newPatient = _curPatient;
    }
    else if (_newPatient == null)
    {
        // No matching patient in the database. We should create a new patient for this study.
        _newPatient = CreateNewPatient(_newPatientInfo);
    }
    else
    {
        // There's already a patient in the database with the new patient demographics.
        // The study should be attached to that patient.
        TransferStudy(_study.Key, _oldPatientInfo, _newPatient);
    }

    // Copy the existing values over into the study & series objects.
    // Note, this sets up an update statement that will update the key columns for
    // Study Instance UID and Series Instance UID; however, the columns will not
    // actually change value. It's a little ugly, but it means that if we add new
    // columns in the future, it just "works".
    _file.DataSet.LoadDicomFields(patientUpdate);
    _file.DataSet.LoadDicomFields(studyUpdate);
    _file.DataSet.LoadDicomFields(seriesUpdate);

    // Get any extensions that exist and process them
    var ep = new ProcessorInsertExtensionPoint();
    var extensions = ep.CreateExtensions();
    foreach (IInsertExtension e in extensions)
        e.UpdateExtension(_partition.Key, patientUpdate, studyUpdate, seriesUpdate, _file);

    UpdatePatientEncoding(_newPatient, patientUpdate);
    SetStudyEncoding(_study, studyUpdate);

    // Update the Patient table
    var patientUpdateBroker = UpdateContext.GetBroker<IPatientEntityBroker>();
    patientUpdateBroker.Update(_newPatient.Key, patientUpdate);

    // Update the Study table
    var studyUpdateBroker = UpdateContext.GetBroker<IStudyEntityBroker>();
    studyUpdateBroker.Update(_study.Key, studyUpdate);

    // Update the Series table
    var seriesUpdateBroker = UpdateContext.GetBroker<ISeriesEntityBroker>();
    seriesUpdateBroker.Update(_curSeries.Key, seriesUpdate);

    // If the Request Attributes Sequence is in the dataset, do an insert.
    // Small hole in this: if the value of the sequence has changed, both the old and
    // the new values will stay in the database. Not much to do about it, except
    // reprocess the whole series, which doesn't seem worth it.
    if (_file.DataSet.Contains(DicomTags.RequestAttributesSequence))
    {
        var attribute = _file.DataSet[DicomTags.RequestAttributesSequence] as DicomAttributeSQ;
        if (attribute != null && !attribute.IsEmpty)
        {
            foreach (DicomSequenceItem sequenceItem in (DicomSequenceItem[]) attribute.Values)
            {
                var requestParms = new RequestAttributesInsertParameters();
                sequenceItem.LoadDicomFields(requestParms);
                requestParms.SeriesKey = _curSeries.Key;

                var insertRequest = UpdateContext.GetBroker<IInsertRequestAttributes>();
                insertRequest.Execute(requestParms);
            }
        }
    }
}
private void SetStudyEncoding(Study study, StudyUpdateColumns update)
{
    // Set the SpecificCharacterSet of the patient and study record. This will update the database
    // and force Patient/Study/Series level query responses to be encoded in UTF8. Image level responses
    // will be encoded using the character set in the image (see QueryScpExtension).
    //
    // Only update the db if necessary, i.e. when the record is not already UTF8.
    if (!IsUTF8(study.SpecificCharacterSet))
    {
        update.SpecificCharacterSet = UTF8;
    }
}
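// Assumed definitions, for context only: IsUTF8 and UTF8 are referenced above
// but not defined in these snippets. A minimal sketch, assuming the DICOM
// Specific Character Set defined term for UTF-8 ("ISO_IR 192"); the actual
// implementation in the codebase may differ.
private const string UTF8 = "ISO_IR 192";

private static bool IsUTF8(string characterSet)
{
    return UTF8.Equals(characterSet, StringComparison.InvariantCultureIgnoreCase);
}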