/// <summary>
/// Execute the command: record the archive entry in ArchiveStudyStorage and mark
/// the corresponding ArchiveQueue item as Completed.
/// </summary>
/// <param name="theProcessor">The processor executing the command.</param>
/// <param name="updateContext">Database update context.</param>
/// <exception cref="ApplicationException">Thrown if the ArchiveQueue update fails.</exception>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    var columns = new ArchiveStudyStorageUpdateColumns
    {
        ArchiveTime = Platform.Time,
        PartitionArchiveKey = _partitionArchiveKey,
        StudyStorageKey = _studyStorageKey,
        ArchiveXml = _archiveXml,
        ServerTransferSyntaxKey = _serverTransferSyntaxKey
    };

    var insertBroker = updateContext.GetBroker<IArchiveStudyStorageEntityBroker>();
    // The inserted entity is not used afterwards; the unused local was removed.
    insertBroker.Insert(columns);

    var parms = new UpdateArchiveQueueParameters
    {
        ArchiveQueueKey = _archiveQueueKey,
        ArchiveQueueStatusEnum = ArchiveQueueStatusEnum.Completed,
        ScheduledTime = Platform.Time,
        StudyStorageKey = _studyStorageKey
    };

    var broker = updateContext.GetBroker<IUpdateArchiveQueue>();
    if (!broker.Execute(parms))
        throw new ApplicationException("InsertArchiveStudyStorageCommand failed");
}
/// <summary>
/// Do the unzip: extract every entry of the archive into the destination folder.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    using (var archive = new ZipFile(_zipFile))
    {
        archive.ExtractAll(_destinationFolder, _overwrite);
    }
}
/// <summary>
/// Load the DICOM file at <c>_path</c> and add it to the study XML stream,
/// recording its SOP and Series Instance UIDs.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
/// <exception cref="ApplicationException">
/// Thrown if the file is missing or the SOP cannot be added to the XML descriptor.
/// </exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    if (!File.Exists(_path))
    {
        Platform.Log(LogLevel.Error, "Unexpected error finding file to add to XML {0}", _path);
        // Bug fix: the original concatenated the path onto a literal "{0}"
        // placeholder ("... XML {0}" + _path), producing a garbled message.
        throw new ApplicationException(
            String.Format("Unexpected error finding file to add to XML {0}", _path));
    }

    var finfo = new FileInfo(_path);
    long fileSize = finfo.Length;

    // Load with pixel data references so the whole pixel payload isn't read into memory.
    var dicomFile = new DicomFile(_path);
    dicomFile.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);

    _sopInstanceUid = dicomFile.DataSet[DicomTags.SopInstanceUid];
    _seriesInstanceUid = dicomFile.DataSet[DicomTags.SeriesInstanceUid];

    // Setup the insert parameters
    if (false == _stream.AddFile(dicomFile, fileSize))
    {
        Platform.Log(LogLevel.Error, "Unexpected error adding SOP to XML Study Descriptor for file {0}",
                     _path);
        throw new ApplicationException("Unexpected error adding SOP to XML Study Descriptor for SOP: " +
                                       dicomFile.MediaStorageSopInstanceUid);
    }
}
/// <summary>
/// Apply each image-level update command to the file, recording the prior values
/// in an Original Attributes Sequence item appended to the file's data set.
/// </summary>
/// <exception cref="ApplicationException">Thrown if any update command fails to apply.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    if (_commands == null)
        return;

    // Item documenting what was modified, by whom, and why ("CORRECT").
    var originalAttributes = new OriginalAttributesSequence
    {
        ModifiedAttributesSequence = new DicomSequenceItem(),
        ModifyingSystem = ProductInformation.Component,
        ReasonForTheAttributeModification = "CORRECT",
        AttributeModificationDatetime = Platform.Time,
        SourceOfPreviousValues = _file.SourceApplicationEntityTitle
    };

    foreach (BaseImageLevelUpdateCommand updateCommand in _commands)
    {
        if (!updateCommand.Apply(_file, originalAttributes))
            throw new ApplicationException(
                String.Format("Unable to update the duplicate sop. Command={0}", updateCommand));
    }

    var sequenceAttribute = _file.DataSet[DicomTags.OriginalAttributesSequence] as DicomAttributeSQ;
    if (sequenceAttribute != null)
        sequenceAttribute.AddSequenceItem(originalAttributes.DicomSequenceItem);
}
/// <summary>
/// Discard every SOP instance in the context's work-queue UID list: delete the
/// image file and its WorkQueueUid row via a nested command processor.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
protected override void OnExecute(CommandProcessor theProcessor)
{
    Platform.CheckForNullReference(Context, "Context");
    Platform.CheckForNullReference(Context.ReconcileWorkQueueData, "Context.ReconcileWorkQueueData");

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        string imagePath = GetReconcileUidPath(uid);
        try
        {
            // Run both deletions through a nested processor so its rollback
            // machinery applies if either command fails.
            using (var processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid)))
            {
                var deleteFile = new FileDeleteCommand(imagePath, true);
                var deleteUid = new DeleteWorkQueueUidCommand(uid);
                processor.AddCommand(deleteFile);
                processor.AddCommand(deleteUid);
                Platform.Log(ServerPlatform.InstanceLogLevel, deleteFile.ToString());
                if (!processor.Execute())
                {
                    throw new Exception(String.Format("Unable to discard image {0}", uid.SopInstanceUid));
                }
            }
        }
        catch (Exception e)
        {
            // A failure on one image should not stop the rest; mark the UID as
            // failed and continue with the next one.
            Platform.Log(LogLevel.Error, e, "Unexpected exception discarding file: {0}", imagePath);
            SopInstanceProcessor.FailUid(uid, true);
        }
    }
}
/// <summary>
/// Execute the insert of the StudyStorage record, deriving the study status
/// from the transfer syntax's compression type.
/// </summary>
/// <param name="theProcessor">The command processor calling us</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    var locInsert = updateContext.GetBroker<IInsertStudyStorage>();
    var insertParms = new InsertStudyStorageParameters
    {
        ServerPartitionKey = _serverPartitionKey,
        StudyInstanceUid = _studyInstanceUid,
        Folder = _folder,
        FilesystemKey = _filesystemKey,
        QueueStudyStateEnum = QueueStudyStateEnum.Idle,
        // The transfer syntax UID was assigned identically in every branch
        // of the original if/else chain; set it once here instead.
        TransferSyntaxUid = _transfersyntax.UidString
    };

    // Map the compression type of the transfer syntax onto the study status.
    if (_transfersyntax.LosslessCompressed)
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossless;
    else if (_transfersyntax.LossyCompressed)
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossy;
    else
        insertParms.StudyStatusEnum = StudyStatusEnum.Online;

    // Find one so we don't uselessly process all the results.
    _location = locInsert.FindOne(insertParms);
}
/// <summary>
/// Update the context's Study entity from the study XML, adjusting store time
/// and flags according to the reason for the update.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Lazily resolve the Study entity for this context.
    if (Context.ContextStudy == null)
    {
        var broker = DataAccessContext.GetStudyBroker();
        Context.ContextStudy = broker.GetStudy(_studyInstanceUid);
        if (Context.ContextStudy == null)
        {
            // This is a bit of a hack to handle batch processing of studies
            Context.ContextStudy = _location.Study;
            broker.AddStudy(Context.ContextStudy);
        }
    }

    //Only update the store time if the study is actively being received/imported.
    if (_reason == UpdateReason.LiveImport || Context.ContextStudy.StoreTime == null)
        Context.ContextStudy.StoreTime = Platform.Time;

    if (_reason != UpdateReason.SopsDeleted)
    {
        //Only update these if the study is being updated in an "additive" way (import/receive/re-index).
        //A series deletion, for example, should not update these.
        Context.ContextStudy.Deleted = false;
        Context.ContextStudy.Reindex = false;
    }

    // Copy the values from the study XML descriptor onto the entity.
    Context.ContextStudy.Update(_studyXml);

    // TODO (2014-01-11) Rigel - Deal with this better in the database, converted due to ticket #11593
    // Dates earlier than SQL Server's minimum DATETIME can't be stored; null them out.
    if (Context.ContextStudy.StudyDate < (DateTime)SqlDateTime.MinValue)
        Context.ContextStudy.StudyDate = null;
    if (Context.ContextStudy.PatientsBirthDate < (DateTime)SqlDateTime.MinValue)
        Context.ContextStudy.PatientsBirthDate = null;
}
/// <summary>
/// Rebuild the study XML by re-reading every instance listed in the current
/// descriptor from disk, skipping series folders or files that are missing,
/// then saving the rebuilt descriptor.
/// </summary>
/// <exception cref="ApplicationException">Thrown if a sub-command fails.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    StudyXml currentXml = LoadStudyXml();
    _newXml = new StudyXml(_studyInstanceUid);

    foreach (SeriesXml series in currentXml)
    {
        string seriesPath = Path.Combine(_rootPath, series.SeriesInstanceUid);
        if (!Directory.Exists(seriesPath))
        {
            Platform.Log(LogLevel.Info, "RebuildXML: series folder {0} is missing", seriesPath);
            continue;
        }

        foreach (InstanceXml instance in series)
        {
            string instancePath =
                Path.Combine(seriesPath, instance.SopInstanceUid + ServerPlatform.DicomFileExtension);
            if (!File.Exists(instancePath))
            {
                Platform.Log(LogLevel.Info, "RebuildXML: file {0} is missing", instancePath);
                continue;
            }

            if (!theProcessor.ExecuteSubCommand(this, new InsertInstanceXmlCommand(_newXml, instancePath)))
                throw new ApplicationException(theProcessor.FailureReason);
        }
    }

    if (!theProcessor.ExecuteSubCommand(this, new SaveXmlCommand(_newXml, _rootPath, _studyInstanceUid)))
        throw new ApplicationException(theProcessor.FailureReason);
}
/// <summary>
/// Insert (or find) a StudyProcess WorkQueue entry for the message's SOP instance.
/// </summary>
/// <exception cref="ApplicationException">Thrown if the insert returns no row.</exception>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    var workQueueBroker = updateContext.GetBroker<IInsertWorkQueue>();

    var queueParms = new InsertWorkQueueParameters
    {
        WorkQueueTypeEnum = WorkQueueTypeEnum.StudyProcess,
        StudyStorageKey = _storageLocation.GetKey(),
        ServerPartitionKey = _storageLocation.ServerPartitionKey,
        SeriesInstanceUid = _message.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty),
        SopInstanceUid = _message.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty),
        ScheduledTime = Platform.Time,
        WorkQueueGroupID = _uidGroupId
    };

    // Extra parameters only apply when processing a duplicate SOP.
    if (_duplicate)
    {
        queueParms.Duplicate = _duplicate;
        queueParms.Extension = _extension;
        queueParms.UidGroupID = _uidGroupId;
    }

    _insertedWorkQueue = workQueueBroker.FindOne(queueParms);
    if (_insertedWorkQueue == null)
        throw new ApplicationException("UpdateWorkQueueCommand failed");
}
/// <summary>
/// Do the work: build the study archive zip containing the study XML, its
/// gzipped copy, the optional UidMap.xml, and every SOP listed in the study XML.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    using (var zipService = Platform.GetService<IZipService>())
    {
        zipService.OpenWrite(_zipFile);
        zipService.ForceCompress = HsmSettings.Default.CompressZipFiles;
        zipService.TempFileFolder = _tempFolder;
        zipService.Comment = String.Format("Archive for study {0}", _studyXml.StudyInstanceUid);

        // Add the studyXml file, then its gzipped twin, at the archive root.
        string xmlName = String.Format("{0}.xml", _studyXml.StudyInstanceUid);
        zipService.AddFile(Path.Combine(_studyFolder, xmlName), String.Empty);
        zipService.AddFile(Path.Combine(_studyFolder, xmlName + ".gz"), String.Empty);

        // Include the UID map when one exists for the study.
        string uidMapXmlPath = Path.Combine(_studyFolder, "UidMap.xml");
        if (File.Exists(uidMapXmlPath))
            zipService.AddFile(uidMapXmlPath, String.Empty);

        // Add each sop from the StudyXmlFile, under its series folder in the zip.
        foreach (SeriesXml seriesXml in _studyXml)
        {
            string seriesFolder = Path.Combine(_studyFolder, seriesXml.SeriesInstanceUid);
            foreach (InstanceXml instanceXml in seriesXml)
            {
                string sopPath = Path.Combine(seriesFolder,
                                              String.Format("{0}.dcm", instanceXml.SopInstanceUid));
                zipService.AddFile(sopPath, seriesXml.SeriesInstanceUid);
            }
        }

        zipService.Save();
    }
}
/// <summary>
/// Back up then save the study XML descriptor to the storage location.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Take a backup first so the write can be rolled back on failure.
    Backup();
    _studyStorageLocation.SaveStudyXml(_studyXml, out _fileCreated);
    // NOTE(review): this unconditionally overwrites the value SaveStudyXml just
    // returned through the out parameter, making that result meaningless.
    // Confirm whether rollback really should always treat the file as created.
    _fileCreated = true;
}
/// <summary>
/// Add the DICOM file to the study XML descriptor, flagging duplicates, and
/// optionally flush the descriptor back to disk.
/// </summary>
/// <exception cref="ApplicationException">Thrown if the SOP cannot be added to the descriptor.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Determine the on-disk size of the file (0 if it doesn't exist yet).
    long fileSize = 0;
    if (File.Exists(_file.Filename))
        fileSize = new FileInfo(_file.Filename).Length;

    String seriesInstanceUid = _file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty);
    String sopInstanceUid = _file.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty);

    // Flag (but still process) SOPs already present in the descriptor.
    if (_studyXml.Contains(seriesInstanceUid, sopInstanceUid))
        _duplicate = true;

    // Setup the insert parameters
    if (!_studyXml.AddFile(_file, fileSize, _settings))
    {
        Platform.Log(LogLevel.Error, "Unexpected error adding SOP to XML Study Descriptor for file {0}",
                     _file.Filename);
        throw new ApplicationException("Unexpected error adding SOP to XML Study Descriptor for SOP: " +
                                       _file.MediaStorageSopInstanceUid);
    }

    if (_writeFile)
    {
        // Write it back out. We flush it out with every added image so that if a failure happens,
        // we can recover properly.
        bool fileCreated;
        _studyStorageLocation.SaveStudyXml(_studyXml, out fileCreated);
    }
}
/// <summary>
/// "Delete" the directory by renaming it with a ".deleted" suffix so the
/// operation can be rolled back; optionally only when the directory is empty.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    try
    {
        if (!Directory.Exists(_dir))
            return;

        // Honor the "only delete empty directories" option.
        if (DeleteOnlyIfEmpty && !DirectoryUtility.IsEmpty(_dir))
            return;

        if (Log)
            Platform.Log(LogLevel.Info, "Deleting {0}", _dir);

        Directory.Move(_dir, _dir + ".deleted");
        _sourceDirRenamed = true;
    }
    catch (Exception ex)
    {
        if (_failIfError)
            throw;

        // ignore it
        Platform.Log(LogLevel.Warn, ex, "Unexpected exception occurred when deleting {0}. It is ignored.", _dir);
    }
}
/// <summary>
/// Update the context's Study entity from the study XML, adjusting store time
/// and flags according to the reason for the update.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Lazily resolve the Study entity for this context.
    if (Context.ContextStudy == null)
    {
        var broker = DataAccessContext.GetStudyBroker();
        Context.ContextStudy = broker.GetStudy(_studyInstanceUid);
        if (Context.ContextStudy == null)
        {
            // This is a bit of a hack to handle batch processing of studies
            Context.ContextStudy = _location.Study;
            broker.AddStudy(Context.ContextStudy);
        }
    }

    //Only update the store time if the study is actively being received/imported.
    if (_reason == UpdateReason.LiveImport || Context.ContextStudy.StoreTime == null)
        Context.ContextStudy.StoreTime = Platform.Time;

    if (_reason != UpdateReason.SopsDeleted)
    {
        //Only update these if the study is being updated in an "additive" way (import/receive/re-index).
        //A series deletion, for example, should not update these.
        Context.ContextStudy.Deleted = false;
        Context.ContextStudy.Reindex = false;
    }

    // Copy the values from the study XML descriptor onto the entity.
    Context.ContextStudy.Update(_studyXml);
}
/// <summary>
/// Create the target directory if it does not already exist, tracking whether
/// this command created it (for rollback).
/// </summary>
/// <exception cref="UnauthorizedAccessException">Rethrown if directory creation is denied.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Resolve the target directory lazily if a delegate was supplied.
    if (String.IsNullOrEmpty(_directory) && GetDirectoryDelegate != null)
        _directory = GetDirectoryDelegate();

    // Already present: nothing was created, so nothing to roll back.
    if (Directory.Exists(_directory))
    {
        _created = false;
        return;
    }

    try
    {
        Directory.CreateDirectory(_directory);
    }
    catch (UnauthorizedAccessException)
    {
        //alert the system admin
        //ServerPlatform.Alert(AlertCategory.System, AlertLevel.Critical, "Filesystem",
        //                     AlertTypeCodes.NoPermission, null, TimeSpan.Zero,
        //                     "Unauthorized access to {0} from {1}", _directory, ServerPlatform.HostId);
        throw;
    }

    _created = true;
}
/// <summary>
/// Do the work: build the study archive zip (study XML, gzipped XML, optional
/// UidMap.xml, and every SOP listed in the study XML) using DotNetZip directly.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    using (var zip = new ZipFile(_zipFile))
    {
        zip.ForceNoCompression = !HsmSettings.Default.CompressZipFiles;
        zip.TempFileFolder = _tempFolder;
        zip.Comment = String.Format("Archive for study {0}", _studyXml.StudyInstanceUid);
        // Allow Zip64 so archives larger than the classic 4GB/65k-entry limits work.
        zip.UseZip64WhenSaving = Zip64Option.AsNecessary;

        // Add the studyXml file, then its gzipped twin, at the archive root.
        string xmlName = String.Format("{0}.xml", _studyXml.StudyInstanceUid);
        zip.AddFile(Path.Combine(_studyFolder, xmlName), String.Empty);
        zip.AddFile(Path.Combine(_studyFolder, xmlName + ".gz"), String.Empty);

        // Include the UID map when one exists for the study.
        string uidMapXmlPath = Path.Combine(_studyFolder, "UidMap.xml");
        if (File.Exists(uidMapXmlPath))
            zip.AddFile(uidMapXmlPath, String.Empty);

        // Add each sop from the StudyXmlFile, under its series folder in the zip.
        foreach (SeriesXml seriesXml in _studyXml)
        {
            string seriesFolder = Path.Combine(_studyFolder, seriesXml.SeriesInstanceUid);
            foreach (InstanceXml instanceXml in seriesXml)
            {
                string sopPath = Path.Combine(seriesFolder,
                                              String.Format("{0}.dcm", instanceXml.SopInstanceUid));
                zip.AddFile(sopPath, seriesXml.SeriesInstanceUid);
            }
        }

        zip.Save();
    }
}
/// <summary>
/// Move the source file to the destination: back up if rollback is required,
/// copy, clear any read-only attribute on the copy, then delete the source.
/// </summary>
/// <exception cref="ApplicationException">
/// Thrown if the destination already exists and overwriting is not allowed.
/// </exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    Platform.CheckTrue(File.Exists(_sourceFile),
                       String.Format("Source file '{0}' doesn't exist", _sourceFile));

    if (File.Exists(_destinationFile) && _failIfExists)
        throw new ApplicationException(
            String.Format("Destination file already exists: {0}", _destinationFile));

    if (RequiresRollback)
        Backup();

    FileUtils.Copy(_sourceFile, _destinationFile, !_failIfExists);

    // Best effort: clear a read-only attribute on the destination copy.
    try
    {
        if ((File.GetAttributes(_destinationFile) & FileAttributes.ReadOnly) == FileAttributes.ReadOnly)
            File.SetAttributes(_destinationFile, FileAttributes.Normal);
    }
    catch (Exception)
    {
    }

    // FileUtils.Delete will check for existence itself.
    FileUtils.Delete(_sourceFile);
    _sourceRenamed = true;
}
/// <summary>
/// Apply the rules.
/// </summary>
/// <remarks>
/// When rules are applied, we are simply adding new <see cref="ServerDatabaseCommand"/> instances
/// for the rules to the currently executing <see cref="ServerCommandProcessor"/>. They will be
/// executed after all other rules have been executed.
/// </remarks>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Load the study XML descriptor for the restored study from disk.
    string studyXmlFile = Path.Combine(_directory, String.Format("{0}.xml", _studyInstanceUid));
    StudyXml theXml = new StudyXml(_studyInstanceUid);
    if (File.Exists(studyXmlFile))
    {
        using (Stream fileStream = FileStreamOpener.OpenForRead(studyXmlFile, FileMode.Open))
        {
            var theMemento = new StudyXmlMemento();
            StudyXmlIo.Read(theMemento, fileStream);
            theXml.SetMemento(theMemento);
            fileStream.Close();
        }
    }
    else
    {
        string errorMsg = String.Format("Unable to load study XML file of restored study: {0}", studyXmlFile);
        Platform.Log(LogLevel.Error, errorMsg);
        throw new ApplicationException(errorMsg);
    }

    DicomFile defaultFile = null;
    bool rulesExecuted = false;
    // Find the first image object in the study and run the rules against it once.
    foreach (SeriesXml seriesXml in theXml)
    {
        foreach (InstanceXml instanceXml in seriesXml)
        {
            // Skip non-image objects
            if (instanceXml.SopClass.Equals(SopClass.KeyObjectSelectionDocumentStorage)
                || instanceXml.SopClass.Equals(SopClass.GrayscaleSoftcopyPresentationStateStorageSopClass)
                || instanceXml.SopClass.Equals(SopClass.BlendingSoftcopyPresentationStateStorageSopClass)
                || instanceXml.SopClass.Equals(SopClass.ColorSoftcopyPresentationStateStorageSopClass))
            {
                // Save the first one encountered, just in case the whole study is non-image objects.
                if (defaultFile == null)
                    defaultFile = new DicomFile("test", new DicomAttributeCollection(), instanceXml.Collection);
                continue;
            }

            DicomFile file = new DicomFile("test", new DicomAttributeCollection(), instanceXml.Collection);
            _context.Message = file;
            _engine.Execute(_context);
            rulesExecuted = true;
            break;
        }
        if (rulesExecuted) break;
    }

    // Whole study was non-image objects: fall back to the first one we saved.
    if (!rulesExecuted && defaultFile != null)
    {
        _context.Message = defaultFile;
        _engine.Execute(_context);
    }
}
/// <summary>
/// Do the unzip: open the archive through the zip service and extract everything
/// into the destination folder.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    using (var unzipper = Platform.GetService<IZipService>())
    {
        unzipper.OpenRead(_zipFile);
        unzipper.ExtractAll(_destinationFolder, _overwrite);
    }
}
/// <summary>
/// Drain the queue of pending sub-commands, executing each in FIFO order.
/// </summary>
/// <exception cref="ApplicationException">Thrown when a sub-command fails.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    while (_subCommands.Count > 0)
    {
        var nextCommand = _subCommands.Dequeue();
        if (!theProcessor.ExecuteSubCommand(this, nextCommand))
            throw new ApplicationException(theProcessor.FailureReason);
    }
}
/// <summary>
/// Delete the series record from the database via the IDeleteSeries broker.
/// </summary>
/// <exception cref="ApplicationException">Thrown if the delete procedure fails.</exception>
protected override void OnExecute(CommandProcessor theProcessor, ClearCanvas.Enterprise.Core.IUpdateContext updateContext)
{
    var broker = updateContext.GetBroker<IDeleteSeries>();
    var criteria = new DeleteSeriesParameters
    {
        StudyStorageKey = _location.Key,
        SeriesInstanceUid = _series.SeriesInstanceUid
    };
    if (!broker.Execute(criteria))
        throw new ApplicationException("Error occurred when calling DeleteSeries");
}
/// <summary>
/// Verify the study has at least one archived copy before allowing a purge.
/// </summary>
/// <exception cref="ApplicationException">Thrown when no storage location has an archive.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    var locations = StudyStorageLocation.FindStorageLocations(_storage);
    foreach (var location in locations)
    {
        // Any location with an archive entry is sufficient.
        if (location.ArchiveLocations.Any())
            return;
    }
    throw new ApplicationException("Cannot purge study which has not been archived");
}
/// <summary>
/// Initialize the action context with the message being processed and the
/// server entities (partition, filesystem, study location, processor) it applies to.
/// </summary>
public ServerActionContext(DicomMessageBase msg, ServerEntityKey filesystemKey,
                           ServerPartition partition, ServerEntityKey studyLocationKey,
                           CommandProcessor commandProcessor)
{
    Message = msg;
    FilesystemKey = filesystemKey;
    ServerPartition = partition;
    ServerPartitionKey = partition.Key;
    StudyLocationKey = studyLocationKey;
    CommandProcessor = commandProcessor;
}
/// <summary>
/// Do the insertion of the AutoRoute.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    // Look up the destination device by AE title on this partition.
    DeviceSelectCriteria deviceSelectCriteria = new DeviceSelectCriteria();
    deviceSelectCriteria.AeTitle.EqualTo(_deviceAe);
    deviceSelectCriteria.ServerPartitionKey.EqualTo(_context.ServerPartitionKey);

    IDeviceEntityBroker selectDevice = updateContext.GetBroker<IDeviceEntityBroker>();
    Device dev = selectDevice.FindOne(deviceSelectCriteria);
    if (dev == null)
    {
        // Unknown destination: log a warning, raise an alert, and skip the request.
        Platform.Log(LogLevel.Warn,
                     "Device '{0}' on partition {1} not in database for autoroute request! Ignoring request.",
                     _deviceAe, _context.ServerPartition.AeTitle);
        ServerPlatform.Alert(
            AlertCategory.Application, AlertLevel.Warning,
            SR.AlertComponentAutorouteRule, AlertTypeCodes.UnableToProcess, null, TimeSpan.FromMinutes(5),
            SR.AlertAutoRouteUnknownDestination, _deviceAe, _context.ServerPartition.AeTitle);
        return;
    }

    if (!dev.AllowAutoRoute)
    {
        // Destination exists but has auto-route disabled: alert and skip.
        Platform.Log(LogLevel.Warn,
                     "Auto-route attempted to device {0} on partition {1} with autoroute support disabled. Ignoring request.",
                     dev.AeTitle, _context.ServerPartition.AeTitle);
        ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Warning,
                             SR.AlertComponentAutorouteRule, AlertTypeCodes.UnableToProcess,
                             null, TimeSpan.FromMinutes(5),
                             SR.AlertAutoRouteDestinationAEDisabled, dev.AeTitle, _context.ServerPartition.AeTitle);
        return;
    }

    // Queue the auto-route work item; default schedule is 10 seconds out when
    // no explicit time was supplied.
    InsertWorkQueueParameters parms = new InsertWorkQueueParameters
    {
        WorkQueueTypeEnum = WorkQueueTypeEnum.AutoRoute,
        ScheduledTime = _scheduledTime.HasValue ? _scheduledTime.Value : Platform.Time.AddSeconds(10),
        StudyStorageKey = _context.StudyLocationKey,
        ServerPartitionKey = _context.ServerPartitionKey,
        DeviceKey = dev.GetKey(),
        SeriesInstanceUid = _context.Message.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty),
        SopInstanceUid = _context.Message.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty)
    };

    IInsertWorkQueue broker = updateContext.GetBroker<IInsertWorkQueue>();
    if (broker.FindOne(parms) == null)
    {
        throw new ApplicationException("InsertAutoRouteCommand failed");
    }
}
/// <summary>
/// Remove the SOP instance from the study XML descriptor, keeping the removed
/// instance XML so the operation can be rolled back.
/// </summary>
/// <exception cref="ApplicationException">Thrown if the instance cannot be removed.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Nothing to do if the series isn't in the descriptor.
    if (!_studyXml.Contains(_seriesUid))
        return;

    Platform.Log(LogLevel.Info, "Removing SOP {0} from StudyXML for study {1}",
                 _sopInstanceUid, _studyInstanceUid);

    // Keep the old instance XML for rollback before removing it.
    _oldInstanceXml = _studyXml[_seriesUid][_sopInstanceUid];

    if (!_studyXml.RemoveInstance(_seriesUid, _sopInstanceUid))
        throw new ApplicationException(
            String.Format("Could not remove SOP Instance {0} from study {1}",
                          _sopInstanceUid, _studyInstanceUid));
}
/// <summary>
/// Persist the UID map to UidMap.xml in the study folder, backing up first
/// when rollback is required. Does nothing when there is no map.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // No map means there is nothing to save.
    if (_map == null)
        return;

    _path = Path.Combine(_studyLocation.GetStudyPath(), "UidMap.xml");

    if (RequiresRollback)
        Backup();

    _map.Save(_path);
}
/// <summary>
/// Remove the SOP instance from the study XML (keeping its XML for rollback)
/// and immediately flush the descriptor to disk via a sub-command.
/// </summary>
/// <exception cref="ApplicationException">Thrown if saving the descriptor fails.</exception>
protected override void OnExecute(CommandProcessor commandProcessor)
{
    // Capture the instance XML for rollback, then drop it from the descriptor.
    _instanceXml = _studyXml.FindInstanceXml(_seriesInstanceUid, _sopInstanceUid);
    _studyXml.RemoveInstance(_seriesInstanceUid, _sopInstanceUid);

    // Flush the descriptor to disk with every change so that if a failure
    // happens, we can recover properly.
    var saveXml = new SaveXmlCommand(_studyXml, _studyLocation);
    if (!commandProcessor.ExecuteSubCommand(this, saveXml))
        throw new ApplicationException(commandProcessor.FailureReason);
}
/// <summary>
/// Apply each image-level update command to the file.
/// </summary>
/// <exception cref="ApplicationException">Thrown if any update command fails to apply.</exception>
protected override void OnExecute(CommandProcessor theProcessor)
{
    if (_commands == null)
        return;

    foreach (BaseImageLevelUpdateCommand updateCommand in _commands)
    {
        if (!updateCommand.Apply(_file))
            throw new ApplicationException(
                String.Format("Unable to update the duplicate sop. Command={0}", updateCommand));
    }
}
/// <summary>
/// Run the rules engine against the object, then queue an ArchiveQueue insert.
/// </summary>
protected override void OnExecute(CommandProcessor theProcessor)
{
    // Run the rules engine against the object.
    _engine.Execute(_context);

    // Do insert into the archival queue. Note that we re-run this with each object processed
    // so that the scheduled time is pushed back each time. Note, however, if the study only
    // has one image, we could incorrectly insert an ArchiveQueue request, since the
    // study rules haven't been run. We re-run the command when the study processed
    // rules are run to remove out the archivequeue request again, if it isn't needed.
    var insertArchiveQueue =
        new InsertArchiveQueueCommand(_context.ServerPartitionKey, _context.StudyLocationKey);
    _context.CommandProcessor.AddCommand(insertArchiveQueue);
}
/// <summary>
/// Remove the file's entry from the study XML descriptor and immediately write
/// the descriptor (plain and gzipped) back to disk.
/// </summary>
protected override void OnExecute(CommandProcessor commandProcessor)
{
    // Drop the file's entry from the descriptor.
    _studyXml.RemoveFile(_file);

    // Write it back out. We flush it out with every change so that if a failure
    // happens, we can recover properly.
    string xmlPath = _studyLocation.GetStudyXmlPath();
    string gzXmlPath = xmlPath + ".gz";
    WriteStudyStream(xmlPath, gzXmlPath, _studyXml);
}
/// <summary>
/// Execute the command's work. Implemented by derived command classes.
/// </summary>
/// <param name="theProcessor">The command processor executing this command.</param>
protected abstract void OnExecute(CommandProcessor theProcessor);