public StudyRulesEngine(ServerRulesEngine studyRulesEngine, StudyStorageLocation location, ServerPartition partition, StudyXml studyXml)
		{
			_studyRulesEngine = studyRulesEngine;
			_studyXml = studyXml;
			_location = location;
			_partition = partition ?? ServerPartition.Load(_location.ServerPartitionKey);
		}
        /// <summary>
        /// Load the <see cref="ServerRulesEngine"/> for each partition.
        /// </summary>
        private void LoadRulesEngine()
        {
            using (var context = new ServerExecutionContext())
            {
                var broker   = context.ReadContext.GetBroker<IServerPartitionEntityBroker>();
                var criteria = new ServerPartitionSelectCriteria();
                IList<ServerPartition> partitions = broker.Find(criteria);

                foreach (ServerPartition partition in partitions)
                {
                    //TODO CR (Jan 2014) - Cached engine not being used now, due to #11673
                    var engine = new ServerRulesEngine(ServerRuleApplyTimeEnum.StudyProcessed, partition.Key);
                    engine.Load();
                    _engines.Add(partition, engine);

                    engine = new ServerRulesEngine(ServerRuleApplyTimeEnum.StudyArchived, partition.Key);
                    engine.Load();
                    _postArchivalEngines.Add(partition, engine);

                    engine = new ServerRulesEngine(ServerRuleApplyTimeEnum.StudyProcessed, partition.Key);
                    engine.AddIncludeType(ServerRuleTypeEnum.DataAccess);
                    engine.Load();
                    _dataAccessEngine.Add(partition, engine);
                }
            }
        }
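The per-partition engines created in LoadRulesEngine are stored in lookup tables keyed by partition; a minimal sketch of what those backing fields might look like (field names come from the code above, the Dictionary types are an assumption based on how ReprocessFilesystem indexes them by partition):

        // Hedged sketch only; the actual field declarations are not shown in these snippets.
        private readonly Dictionary<ServerPartition, ServerRulesEngine> _engines =
            new Dictionary<ServerPartition, ServerRulesEngine>();
        private readonly Dictionary<ServerPartition, ServerRulesEngine> _postArchivalEngines =
            new Dictionary<ServerPartition, ServerRulesEngine>();
        private readonly Dictionary<ServerPartition, ServerRulesEngine> _dataAccessEngine =
            new Dictionary<ServerPartition, ServerRulesEngine>();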
		public ApplyRulesCommand(string directory, string studyInstanceUid, ServerActionContext context) : base("Apply Server Rules", true)
		{
			_directory = directory;
			_studyInstanceUid = studyInstanceUid;
			_context = context;
			_engine = new ServerRulesEngine(ServerRuleApplyTimeEnum.StudyRestored, _context.ServerPartitionKey);
			_engine.Load();
		}
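As a rough usage sketch (not taken from any of these snippets), the command would typically be queued on a ServerCommandProcessor alongside the other restore-time commands; dicomFile, studyDirectory, partition and location below are illustrative placeholders:

		// Hedged sketch only; variable names are placeholders, not from the original code.
		using (var processor = new ServerCommandProcessor("Apply StudyRestored rules"))
		{
			var context = new ServerActionContext(dicomFile, location.FilesystemKey,
			                                      partition, location.Key, processor);
			processor.AddCommand(new ApplyRulesCommand(studyDirectory, location.StudyInstanceUid, context));

			if (!processor.Execute())
				Platform.Log(LogLevel.Error, "Unable to apply StudyRestored rules for study {0}",
				             location.StudyInstanceUid);
		}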
 public UpdateStudyCommand(ServerPartition partition,
                           StudyStorageLocation studyLocation,
                           IList <BaseImageLevelUpdateCommand> imageLevelCommands,
                           ServerRuleApplyTimeEnum applyTime)
     : base("Update existing study")
 {
     _partition        = partition;
     _oldStudyLocation = studyLocation;
     _commands         = imageLevelCommands;
     _statistics       = new UpdateStudyStatistics(_oldStudyLocation.StudyInstanceUid);
     // Load the engine for editing rules.
     _rulesEngine = new ServerRulesEngine(applyTime, _partition.Key);
     if (applyTime.Equals(ServerRuleApplyTimeEnum.SopProcessed))
     {
         _rulesEngine.AddIncludeType(ServerRuleTypeEnum.AutoRoute);
     }
     _rulesEngine.Load();
 }
        protected override bool OnServerSideEvaluate()
        {
            String ruleXml = GetControlValidationValue(ControlToValidate);

            if (String.IsNullOrEmpty(ruleXml))
            {
                ErrorMessage = ValidationErrors.ServerRuleXMLIsMissing;
                return(false);
            }

            if (RuleTypeControl.Equals(ServerRuleTypeEnum.DataAccess.Lookup))
            {
                // DataAccess rules being validated contain only the condition.  Wrap it in a
                // fake rule with a no-op action so it can be parsed as a complete rule.
                ruleXml = String.Format("<rule>{0}<action><no-op/></action></rule>", ruleXml);
            }

            var theDoc = new XmlDocument();

            try
            {
                theDoc.LoadXml(ruleXml);
            }
            catch (Exception e)
            {
                ErrorMessage = String.Format(ValidationErrors.UnableToParseServerRuleXML, e.Message);
                return(false);
            }

            string error;

            if (false == Rule<ServerActionContext>.ValidateRule(
                    theDoc,
                    ServerRulesEngine.GetSpecificationCompiler(),
                    ServerRulesEngine.GetActionCompiler(ServerRuleTypeEnum.GetEnum(RuleTypeControl)),
                    out error))
            {
                ErrorMessage = error;
                return(false);
            }

            return(true);
        }
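For illustration, a DataAccess rule submitted for validation carries only a condition fragment, and the wrapping above turns it into a complete rule before parsing; the condition XML below is a made-up example, not taken from the original source:

            // Hypothetical DataAccess fragment: only a <condition> element is supplied.
            const string conditionXml =
                "<condition expressionLanguage=\"dicom\">" +
                "<equal test=\"$Modality\" refValue=\"CT\"/>" +
                "</condition>";

            // The validator wraps the fragment with a no-op action so it parses as a full rule.
            string wrapped = String.Format("<rule>{0}<action><no-op/></action></rule>", conditionXml);

            var doc = new XmlDocument();
            doc.LoadXml(wrapped);   // now a well-formed <rule> document for Rule<ServerActionContext>.ValidateRule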
		public void Apply(ServerRuleApplyTimeEnum applyTime, CommandProcessor theProcessor)
		{
			try
			{
				if (_studyRulesEngine == null || !_studyRulesEngine.RuleApplyTime.Equals(applyTime))
				{
					_studyRulesEngine = new ServerRulesEngine(applyTime, _location.ServerPartitionKey);
					_studyRulesEngine.Load();
				}

				List<string> files = GetFirstInstanceInEachStudySeries();
				if (files.Count == 0)
				{
					string message =
						String.Format("Unexpectedly unable to find SOP instances for rules engine in each series in study: {0}",
						              _location.StudyInstanceUid);
					Platform.Log(LogLevel.Error, message);
					throw new ApplicationException(message);
				}

				Platform.Log(LogLevel.Info, "Processing Study Level rules for study {0} on partition {1} at {2} apply time",
				             _location.StudyInstanceUid, _partition.Description, applyTime.Description);

				foreach (string seriesFilePath in files)
				{
					var theFile = new DicomFile(seriesFilePath);
					theFile.Load(DicomReadOptions.Default);
					var context =
						new ServerActionContext(theFile, _location.FilesystemKey, _partition, _location.Key, theProcessor){ RuleEngine = _studyRulesEngine};
					_studyRulesEngine.Execute(context);

					ProcessSeriesRules(theFile, theProcessor);
				}

				if (applyTime.Equals(ServerRuleApplyTimeEnum.StudyProcessed))
				{
					// This is a bit kludgy, but we had a problem with studies with only 1 image incorrectly
					// having archive requests inserted when they were scheduled for deletion.  Calling
					// this command here so that if a delete is inserted at the study level, we will remove
					// the previously inserted archive request for the study.  Note also this has to be done
					// after the rules engine is executed.
					theProcessor.AddCommand(new InsertArchiveQueueCommand(_location.ServerPartitionKey, _location.Key));
				}
			}
			finally
			{
				if (_studyRulesEngine != null)
					_studyRulesEngine.Complete(_studyRulesEngine.RulesApplied);
			}
		}
		public ApplySopRulesCommand(ServerActionContext context, ServerRulesEngine engine)
			: base("Apply SOP Rules Engine and insert Archival Request", false)
		{
			_context = context;
			_engine = engine;
		}
        /// <summary>
        /// Reprocess all filesystems for a partition.
        /// </summary>
        /// <param name="partition">The partition to reprocess.</param>
        private void ReprocessPartition(ServerPartition partition)
        {
            var engine = new ServerRulesEngine(ServerRuleApplyTimeEnum.StudyProcessed, partition.Key);

            engine.Load();

            var postArchivalEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.StudyArchived, partition.Key);

            postArchivalEngine.Load();

            var dataAccessEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.StudyProcessed, partition.Key);

            dataAccessEngine.AddIncludeType(ServerRuleTypeEnum.DataAccess);
            dataAccessEngine.Load();

            var filesystems = FilesystemMonitor.Instance.GetFilesystems();

            foreach (var f in filesystems)
            {
                var partitionDir  = Path.Combine(f.Filesystem.FilesystemPath, partition.PartitionFolder);
                var filesystemDir = new DirectoryInfo(partitionDir);
                foreach (DirectoryInfo dateDir in filesystemDir.GetDirectories())
                {
                    if (dateDir.FullName.EndsWith("Deleted") ||
                        dateDir.FullName.EndsWith(ServerPlatform.ReconcileStorageFolder))
                    {
                        continue;
                    }

                    foreach (DirectoryInfo studyDir in dateDir.GetDirectories())
                    {
                        String studyInstanceUid = studyDir.Name;
                        try
                        {
                            StudyStorageLocation location = LoadReadableStorageLocation(partition.GetKey(), studyInstanceUid);
                            if (location == null)
                            {
                                foreach (DirectoryInfo seriesDir in studyDir.GetDirectories())
                                {
                                    FileInfo[] sopInstanceFiles = seriesDir.GetFiles("*.dcm");

                                    DicomFile file = null;
                                    foreach (FileInfo sopFile in sopInstanceFiles)
                                    {
                                        if (!sopFile.FullName.EndsWith(ServerPlatform.DicomFileExtension))
                                        {
                                            continue;
                                        }

                                        try
                                        {
                                            file = new DicomFile(sopFile.FullName);
                                            file.Load(DicomTags.StudyId, DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default);
                                            break;
                                        }
                                        catch (Exception e)
                                        {
                                            Platform.Log(LogLevel.Warn, e, "Unexpected failure loading file: {0}.  Continuing to next file.",
                                                         sopFile.FullName);
                                            file = null;
                                        }
                                    }
                                    if (file != null)
                                    {
                                        studyInstanceUid = file.DataSet[DicomTags.StudyInstanceUid].ToString();
                                        break;
                                    }
                                }

                                location = LoadReadableStorageLocation(partition.GetKey(), studyInstanceUid);
                                if (location == null)
                                {
                                    continue;
                                }
                            }

                            ProcessStudy(partition, location, engine, postArchivalEngine, dataAccessEngine);
                            //_stats.NumStudies++;

                            if (CancelPending)
                            {
                                return;
                            }
                        }
                        catch (Exception e)
                        {
                            Platform.Log(LogLevel.Error, e,
                                         "Unexpected error while processing study: {0} on partition {1}.", studyInstanceUid,
                                         partition.Description);
                        }
                    }

                    // Clean up the directory if it's empty.
                    DirectoryUtility.DeleteIfEmpty(dateDir.FullName);
                }
            }
        }
        protected override void ProcessItem(Model.WorkQueue item)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(StorageLocation, "StorageLocation");

            // Verify the study is not lossy online and lossless in the archive.
            // This could happen if the images were received WHILE the study was being lossy compressed.
            // The study state would not be set until the compression was completed or partially completed.
            CheckIfStudyIsLossy();


            Statistics.TotalProcessTime.Start();
            bool successful;
        	bool idle = false;
            //Load the specific UIDs that need to be processed.
            LoadUids(item);

            int totalUidCount = WorkQueueUidList.Count;

            if (totalUidCount == 0)
            {
                successful = true;
                idle = true;
            }
            else
            {
                try
                {
                    Context = new StudyProcessorContext(StorageLocation, WorkQueueItem);

                    // Load the rules engine
                    _sopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, item.ServerPartitionKey);
                    _sopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
                    _sopProcessedRulesEngine.Load();
                    Statistics.SopProcessedEngineLoadTime.Add(_sopProcessedRulesEngine.Statistics.LoadTime);
                    Context.SopProcessedRulesEngine = _sopProcessedRulesEngine;
                    
                    if (Study != null)
                    {
                        Platform.Log(LogLevel.Info, "Processing study {0} for Patient {1} (PatientId:{2} A#:{3}), {4} objects",
                                     Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                                     Study.AccessionNumber, WorkQueueUidList.Count);
                    }
                    else
                    {
                        Platform.Log(LogLevel.Info, "Processing study {0}, {1} objects",
                                     StorageLocation.StudyInstanceUid, WorkQueueUidList.Count);
                    }

                    // Process the images in the list
                    successful = ProcessUidList(item) > 0;
                }
                catch (StudyIsNearlineException ex)
                {
                    // delay until the target is restored
                    // NOTE: If the study could not be restored after a certain period of time, this entry will be marked as failed.
                    if (ex.RestoreRequested)
                    {
                        PostponeItem(string.Format("Unable to auto-reconcile at this time: the target study {0} is not online yet. Restore has been requested.", ex.StudyInstanceUid));
                        return;
                    }
                	// fail right away
                	FailQueueItem(item, string.Format("Unable to auto-reconcile at this time: the target study {0} is not nearline and could not be restored.", ex.StudyInstanceUid));
                	return;
                }
            }
            Statistics.TotalProcessTime.End();

			if (successful)
			{
				if (idle && item.ExpirationTime <= Platform.Time)
				{
					// Run Study / Series Rules Engine.
					var engine = new StudyRulesEngine(StorageLocation, ServerPartition);
					engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);

					// Log the FilesystemQueue related entries
					StorageLocation.LogFilesystemQueue();

					// Delete the queue entry.
					PostProcessing(item,
					               WorkQueueProcessorStatus.Complete,
					               WorkQueueProcessorDatabaseUpdate.ResetQueueState);
				}
				else if (idle)
					PostProcessing(item,
								   WorkQueueProcessorStatus.IdleNoDelete, // Don't delete, so we ensure the rules engine is run later.
								   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
				else
					PostProcessing(item,
								   WorkQueueProcessorStatus.Pending,
								   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
			}
			else
			{
				bool allFailedDuplicate = CollectionUtils.TrueForAll(WorkQueueUidList, uid => uid.Duplicate && uid.Failed);

				if (allFailedDuplicate)
				{
					Platform.Log(LogLevel.Error, "All entries are duplicates");

					PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
					return;
				}
				PostProcessingFailure(item, WorkQueueProcessorFailureType.NonFatal);
			}				
        }
		/// <summary>
		/// Method for applying rules when a new series has been inserted.
		/// </summary>
		/// <param name="file">The DICOM file being processed.</param>
		/// <param name="processor">The command processor</param>
		private void ProcessSeriesRules(DicomFile file, CommandProcessor processor)
		{
			if (_seriesRulesEngine == null)
			{
				_seriesRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SeriesProcessed, _location.ServerPartitionKey);
				_seriesRulesEngine.Load();
			}
			else
			{
				_seriesRulesEngine.Statistics.LoadTime.Reset();
				_seriesRulesEngine.Statistics.ExecutionTime.Reset();
			}

			var context = new ServerActionContext(file, _location.FilesystemKey, _partition, _location.Key, processor);

			_seriesRulesEngine.Execute(context);
		}
        /// <summary>
        /// Reprocess a filesystem.
        /// </summary>
        /// <param name="filesystem">The filesystem to reprocess.</param>
        private void ReprocessFilesystem(Filesystem filesystem)
        {
            var filesystemDir = new DirectoryInfo(filesystem.FilesystemPath);

            foreach (DirectoryInfo partitionDir in filesystemDir.GetDirectories())
            {
                ServerPartition partition;
                if (GetServerPartition(partitionDir.Name, out partition) == false)
                {
                    if (!partitionDir.Name.EndsWith("_Incoming") && !partitionDir.Name.Equals("temp") &&
                        !partitionDir.Name.Equals("ApplicationLog") && !partitionDir.Name.Equals("AlertLog"))
                    {
                        Platform.Log(LogLevel.Error, "Unknown partition folder '{0}' in filesystem: {1}", partitionDir.Name,
                                     filesystem.Description);
                    }
                    continue;
                }

                // Since we found a partition, we should find a rules engine too.
                ServerRulesEngine engine             = _engines[partition];
                ServerRulesEngine postArchivalEngine = _postArchivalEngines[partition];
                ServerRulesEngine dataAccessEngine   = _dataAccessEngine[partition];

                foreach (DirectoryInfo dateDir in partitionDir.GetDirectories())
                {
                    if (dateDir.FullName.EndsWith("Deleted") ||
                        dateDir.FullName.EndsWith(ServerPlatform.ReconcileStorageFolder))
                    {
                        continue;
                    }

                    foreach (DirectoryInfo studyDir in dateDir.GetDirectories())
                    {
                        String studyInstanceUid = studyDir.Name;
                        try
                        {
                            StudyStorageLocation location = LoadReadableStorageLocation(partition.GetKey(), studyInstanceUid);
                            if (location == null)
                            {
                                foreach (DirectoryInfo seriesDir in studyDir.GetDirectories())
                                {
                                    FileInfo[] sopInstanceFiles = seriesDir.GetFiles("*.dcm");

                                    DicomFile file = null;
                                    foreach (FileInfo sopFile in sopInstanceFiles)
                                    {
                                        if (!sopFile.FullName.EndsWith(ServerPlatform.DicomFileExtension))
                                        {
                                            continue;
                                        }

                                        try
                                        {
                                            file = new DicomFile(sopFile.FullName);
                                            file.Load(DicomTags.StudyId, DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default);
                                            break;
                                        }
                                        catch (Exception e)
                                        {
                                            Platform.Log(LogLevel.Warn, e, "Unexpected failure loading file: {0}.  Continuing to next file.",
                                                         sopFile.FullName);
                                            file = null;
                                        }
                                    }
                                    if (file != null)
                                    {
                                        studyInstanceUid = file.DataSet[DicomTags.StudyInstanceUid].ToString();
                                        break;
                                    }
                                }

                                location = LoadReadableStorageLocation(partition.GetKey(), studyInstanceUid);
                                if (location == null)
                                {
                                    continue;
                                }
                            }

                            ProcessStudy(partition, location, engine, postArchivalEngine, dataAccessEngine);
                            _stats.NumStudies++;

                            if (CancelPending)
                            {
                                return;
                            }
                        }
                        catch (Exception e)
                        {
                            Platform.Log(LogLevel.Error, e,
                                         "Unexpected error while processing study: {0} on partition {1}.", studyInstanceUid,
                                         partition.Description);
                        }
                    }

                    // Clean up the directory if it's empty.
                    DirectoryUtility.DeleteIfEmpty(dateDir.FullName);
                }
            }
        }
        protected override void ProcessItem(Model.WorkQueue item)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(StorageLocation, "StorageLocation");

            // Verify the study is not lossy online and lossless in the archive.
            // This could happen if the images were received WHILE the study was being lossy compressed.
            // The study state would not be set until the compression was completed or partially completed.
            CheckIfStudyIsLossy();


            Statistics.TotalProcessTime.Start();
            bool successful;
            bool idle = false;

            //Load the specific UIDs that need to be processed.
            LoadUids(item);

            int totalUidCount = WorkQueueUidList.Count;

            if (totalUidCount == 0)
            {
                successful = true;
                idle       = true;
            }
            else
            {
                try
                {
                    Context = new StudyProcessorContext(StorageLocation);

                    // Load the rules engine
                    _sopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, item.ServerPartitionKey);
                    _sopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
                    _sopProcessedRulesEngine.Load();
                    Statistics.SopProcessedEngineLoadTime.Add(_sopProcessedRulesEngine.Statistics.LoadTime);
                    Context.SopProcessedRulesEngine = _sopProcessedRulesEngine;

                    if (Study != null)
                    {
                        Platform.Log(LogLevel.Info, "Processing study {0} for Patient {1} (PatientId:{2} A#:{3}), {4} objects",
                                     Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                                     Study.AccessionNumber, WorkQueueUidList.Count);
                    }
                    else
                    {
                        Platform.Log(LogLevel.Info, "Processing study {0}, {1} objects",
                                     StorageLocation.StudyInstanceUid, WorkQueueUidList.Count);
                    }

                    // Process the images in the list
                    successful = ProcessUidList(item) > 0;
                }
                catch (StudyIsNearlineException ex)
                {
                    // delay until the target is restored
                    // NOTE: If the study could not be restored after a certain period of time, this entry will be marked as failed.
                    if (ex.RestoreRequested)
                    {
                        PostponeItem(string.Format("Unable to auto-reconcile at this time: the target study {0} is not online yet. Restore has been requested.", ex.StudyInstanceUid));
                        return;
                    }
                    // fail right away
                    FailQueueItem(item, string.Format("Unable to auto-reconcile at this time: the target study {0} is not nearline and could not be restored.", ex.StudyInstanceUid));
                    return;
                }
            }
            Statistics.TotalProcessTime.End();

            if (successful)
            {
                if (idle && item.ExpirationTime <= Platform.Time)
                {
                    // Run Study / Series Rules Engine.
                    var engine = new StudyRulesEngine(StorageLocation, ServerPartition);
                    engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);

                    // Log the FilesystemQueue related entries
                    StorageLocation.LogFilesystemQueue();

                    // Delete the queue entry.
                    PostProcessing(item,
                                   WorkQueueProcessorStatus.Complete,
                                   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
                else if (idle)
                {
                    PostProcessing(item,
                                   WorkQueueProcessorStatus.IdleNoDelete,                                 // Don't delete, so we ensure the rules engine is run later.
                                   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
                else
                {
                    PostProcessing(item,
                                   WorkQueueProcessorStatus.Pending,
                                   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
            }
            else
            {
                bool allFailedDuplicate = CollectionUtils.TrueForAll(WorkQueueUidList, uid => uid.Duplicate && uid.Failed);

                if (allFailedDuplicate)
                {
                    Platform.Log(LogLevel.Error, "All entries are duplicates");

                    PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
                    return;
                }
                PostProcessingFailure(item, WorkQueueProcessorFailureType.NonFatal);
            }
        }
		public UpdateStudyCommand(ServerPartition partition, 
		                          StudyStorageLocation studyLocation,
		                          IList<BaseImageLevelUpdateCommand> imageLevelCommands,
								  ServerRuleApplyTimeEnum applyTime,
								  WorkQueue workQueue) 
			: base("Update existing study")
		{
			_partition = partition;
			_oldStudyLocation = studyLocation;
			_commands = imageLevelCommands;
			_workQueue = workQueue;
			_statistics = new UpdateStudyStatistics(_oldStudyLocation.StudyInstanceUid);
			// Load the engine for editing rules.
			_rulesEngine = new ServerRulesEngine(applyTime, _partition.Key);
			if (applyTime.Equals(ServerRuleApplyTimeEnum.SopProcessed))
				_rulesEngine.AddIncludeType(ServerRuleTypeEnum.AutoRoute);
			_rulesEngine.Load();
		}
        /// <summary>
        /// Reprocess a specific study.
        /// </summary>
        /// <param name="partition">The ServerPartition the study is on.</param>
        /// <param name="location">The storage location of the study to process.</param>
        /// <param name="engine">The rules engine to use when processing the study.</param>
        /// <param name="postArchivalEngine">The rules engine used for studies that have been archived.</param>
        /// <param name="dataAccessEngine">The rules engine strictly used for setting data acess.</param>
        protected static void ProcessStudy(ServerPartition partition, StudyStorageLocation location, ServerRulesEngine engine, ServerRulesEngine postArchivalEngine, ServerRulesEngine dataAccessEngine)
        {
            if (!location.QueueStudyStateEnum.Equals(QueueStudyStateEnum.Idle) || !location.AcquireWriteLock())
            {
                Platform.Log(LogLevel.Error, "Unable to lock study {0}. The study is being processed. (Queue State: {1})", location.StudyInstanceUid, location.QueueStudyStateEnum.Description);
            }
            else
            {
                try
                {
                    DicomFile msg = LoadInstance(location);
                    if (msg == null)
                    {
                        Platform.Log(LogLevel.Error, "Unable to load file for study {0}", location.StudyInstanceUid);
                        return;
                    }

                    bool archiveQueueExists;
                    bool archiveStudyStorageExists;
                    bool filesystemDeleteExists;
                    using (IReadContext read = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
                    {
                        // Check for existing archive queue entries
                        var archiveQueueBroker   = read.GetBroker<IArchiveQueueEntityBroker>();
                        var archiveQueueCriteria = new ArchiveQueueSelectCriteria();
                        archiveQueueCriteria.StudyStorageKey.EqualTo(location.Key);
                        archiveQueueExists = archiveQueueBroker.Count(archiveQueueCriteria) > 0;


                        var archiveStorageBroker        = read.GetBroker<IArchiveStudyStorageEntityBroker>();
                        var archiveStudyStorageCriteria = new ArchiveStudyStorageSelectCriteria();
                        archiveStudyStorageCriteria.StudyStorageKey.EqualTo(location.Key);
                        archiveStudyStorageExists = archiveStorageBroker.Count(archiveStudyStorageCriteria) > 0;

                        var filesystemQueueBroker   = read.GetBroker<IFilesystemQueueEntityBroker>();
                        var filesystemQueueCriteria = new FilesystemQueueSelectCriteria();
                        filesystemQueueCriteria.StudyStorageKey.EqualTo(location.Key);
                        filesystemQueueCriteria.FilesystemQueueTypeEnum.EqualTo(FilesystemQueueTypeEnum.DeleteStudy);
                        filesystemDeleteExists = filesystemQueueBroker.Count(filesystemQueueCriteria) > 0;
                    }

                    using (var commandProcessor = new ServerCommandProcessor("Study Rule Processor")
                    {
                        PrimaryServerPartitionKey = partition.GetKey(),
                        PrimaryStudyKey = location.Study.GetKey()
                    })
                    {
                        var context = new ServerActionContext(msg, location.FilesystemKey, partition, location.Key, commandProcessor);

                        // Check if the Study has been archived
                        if (archiveStudyStorageExists && !archiveQueueExists && !filesystemDeleteExists)
                        {
                            // Add a command to delete the current filesystemQueue entries, so that they can
                            // be reinserted by the rules engine.
                            context.CommandProcessor.AddCommand(new DeleteFilesystemQueueCommand(location.Key, ServerRuleApplyTimeEnum.StudyArchived));

                            // How to deal with existing FilesystemQueue entries is problematic here.  If the study
                            // has been migrated off tier 1, we probably don't want to modify the tier migration
                            // entries.  Compression entries may have been entered when the Study was initially
                            // processed; we don't want to delete them, because they might still be valid.
                            // We just re-run the rules engine at this point, and delete only the StudyPurge entries,
                            // since those we know at least would only be applied for archived studies.
                            var studyRulesEngine = new StudyRulesEngine(postArchivalEngine, location, location.ServerPartition, location.LoadStudyXml());
                            studyRulesEngine.Apply(ServerRuleApplyTimeEnum.StudyArchived, commandProcessor);

                            // Post Archive doesn't allow data access rules.  Force Data Access rules to be reapplied
                            // to these studies also.
                            dataAccessEngine.Execute(context);
                        }
                        else
                        {
                            // Add a command to delete the current filesystemQueue entries, so that they can
                            // be reinserted by the rules engine.
                            context.CommandProcessor.AddCommand(new DeleteFilesystemQueueCommand(location.Key, ServerRuleApplyTimeEnum.StudyProcessed));

                            // Execute the rules engine, insert commands to update the database into the command processor.
                            // Due to ticket #11673, we create a new rules engine instance for each study, since the Study QC rules
                            // don't work right now with a single rules engine.
                            //TODO CR (Jan 2014) - Check if we can go back to caching the rules engine to reduce database hits on the rules
                            var studyRulesEngine = new StudyRulesEngine(location, location.ServerPartition, location.LoadStudyXml());
                            studyRulesEngine.Apply(ServerRuleApplyTimeEnum.StudyProcessed, commandProcessor);
                        }

                        // Do the actual database updates.
                        if (false == context.CommandProcessor.Execute())
                        {
                            Platform.Log(LogLevel.Error, "Unexpected failure processing Study level rules for study {0}", location.StudyInstanceUid);
                        }

                        // Log the FilesystemQueue related entries
                        location.LogFilesystemQueue();
                    }
                }
                finally
                {
                    location.ReleaseWriteLock();
                }
            }
        }
		/// <summary>
		/// Reprocess a specific study.
		/// </summary>
		/// <param name="partition">The ServerPartition the study is on.</param>
		/// <param name="location">The storage location of the study to process.</param>
		/// <param name="engine">The rules engine to use when processing the study.</param>
		/// <param name="postArchivalEngine">The rules engine used for studies that have been archived.</param>
		/// <param name="dataAccessEngine">The rules engine strictly used for setting data acess.</param>
		protected static void ProcessStudy(ServerPartition partition, StudyStorageLocation location, ServerRulesEngine engine, ServerRulesEngine postArchivalEngine, ServerRulesEngine dataAccessEngine)
		{
			if (!location.QueueStudyStateEnum.Equals(QueueStudyStateEnum.Idle) || !location.AcquireWriteLock())
			{
				Platform.Log(LogLevel.Error, "Unable to lock study {0}. The study is being processed. (Queue State: {1})", location.StudyInstanceUid,location.QueueStudyStateEnum.Description); 
			}
			else
			{
				try
				{
					DicomFile msg = LoadInstance(location);
					if (msg == null)
					{
						Platform.Log(LogLevel.Error, "Unable to load file for study {0}", location.StudyInstanceUid);
						return;
					}

					bool archiveQueueExists;
					bool archiveStudyStorageExists;
					bool filesystemDeleteExists;
					using (IReadContext read = PersistentStoreRegistry.GetDefaultStore().OpenReadContext())
					{
						// Check for existing archive queue entries
						var archiveQueueBroker = read.GetBroker<IArchiveQueueEntityBroker>();
						var archiveQueueCriteria = new ArchiveQueueSelectCriteria();
						archiveQueueCriteria.StudyStorageKey.EqualTo(location.Key);
						archiveQueueExists = archiveQueueBroker.Count(archiveQueueCriteria) > 0;


						var archiveStorageBroker = read.GetBroker<IArchiveStudyStorageEntityBroker>();
						var archiveStudyStorageCriteria = new ArchiveStudyStorageSelectCriteria();
						archiveStudyStorageCriteria.StudyStorageKey.EqualTo(location.Key);
						archiveStudyStorageExists = archiveStorageBroker.Count(archiveStudyStorageCriteria) > 0;

						var filesystemQueueBroker = read.GetBroker<IFilesystemQueueEntityBroker>();
						var filesystemQueueCriteria = new FilesystemQueueSelectCriteria();
						filesystemQueueCriteria.StudyStorageKey.EqualTo(location.Key);
						filesystemQueueCriteria.FilesystemQueueTypeEnum.EqualTo(FilesystemQueueTypeEnum.DeleteStudy);
						filesystemDeleteExists = filesystemQueueBroker.Count(filesystemQueueCriteria) > 0;
					}

					using (var commandProcessor = new ServerCommandProcessor("Study Rule Processor"))
					{
						var context = new ServerActionContext(msg, location.FilesystemKey, partition, location.Key, commandProcessor);
					
						// Check if the Study has been archived 
						if (archiveStudyStorageExists && !archiveQueueExists && !filesystemDeleteExists)
						{
							// Add a command to delete the current filesystemQueue entries, so that they can 
							// be reinserted by the rules engine.
							context.CommandProcessor.AddCommand(new DeleteFilesystemQueueCommand(location.Key, ServerRuleApplyTimeEnum.StudyArchived));

							// How to deal with existing FilesystemQueue entries is problematic here.  If the study
							// has been migrated off tier 1, we probably don't want to modify the tier migration
							// entries.  Compression entries may have been entered when the Study was initially
							// processed; we don't want to delete them, because they might still be valid.
							// We just re-run the rules engine at this point, and delete only the StudyPurge entries,
							// since those we know at least would only be applied for archived studies.
							var studyRulesEngine = new StudyRulesEngine(postArchivalEngine, location, location.ServerPartition, location.LoadStudyXml());
							studyRulesEngine.Apply(ServerRuleApplyTimeEnum.StudyArchived, commandProcessor);

							// Post Archive doesn't allow data access rules.  Force Data Access rules to be reapplied
							// to these studies also.
							dataAccessEngine.Execute(context);
						}
						else
						{
							// Add a command to delete the current filesystemQueue entries, so that they can 
							// be reinserted by the rules engine.
							context.CommandProcessor.AddCommand(new DeleteFilesystemQueueCommand(location.Key,ServerRuleApplyTimeEnum.StudyProcessed));

							// Execute the rules engine, insert commands to update the database into the command processor.
							// Due to ticket #11673, we create a new rules engine instance for each study, since the Study QC rules
							// don't work right now with a single rules engine.
							//TODO CR (Jan 2014) - Check if we can go back to caching the rules engine to reduce database hits on the rules
							var studyRulesEngine = new StudyRulesEngine(location, location.ServerPartition, location.LoadStudyXml());
							studyRulesEngine.Apply(ServerRuleApplyTimeEnum.StudyProcessed, commandProcessor);
						}

						// Do the actual database updates.
						if (false == context.CommandProcessor.Execute())
						{
							Platform.Log(LogLevel.Error, "Unexpected failure processing Study level rules for study {0}", location.StudyInstanceUid);
						}

						// Log the FilesystemQueue related entries
						location.LogFilesystemQueue();
					}
				}
				finally
				{
					location.ReleaseWriteLock();
				}
			}
		}
        public ValidationResult ValidateServerRule(string serverRule, string ruleType)
        {
            ValidationResult result = new ValidationResult();

            if (String.IsNullOrEmpty(serverRule))
            {
                result.ErrorText = ValidationErrors.ServerRuleXMLIsMissing;
                result.Success   = false;
                result.ErrorCode = -5000;
                return(result);
            }

            ServerRuleTypeEnum type;

            try
            {
                type = ServerRuleTypeEnum.GetEnum(ruleType);
            }
            catch (Exception e)
            {
                result.ErrorText = String.Format(ValidationErrors.UnableToParseServerRuleXML, e.Message);
                result.Success   = false;
                result.ErrorCode = -5000;
                return(result);
            }

            XmlDocument theDoc = new XmlDocument();

            try
            {
                string xml = Microsoft.JScript.GlobalObject.unescape(serverRule);

                if (type.Equals(ServerRuleTypeEnum.DataAccess))
                {
                    // DataAccess rules being validated contain only the condition.  Wrap it in a
                    // fake rule with a no-op action so it can be parsed as a complete rule.
                    xml = String.Format("<rule>{0}<action><no-op/></action></rule>", xml);
                }

                theDoc.LoadXml(xml);
            }
            catch (Exception e)
            {
                result.ErrorText = String.Format(ValidationErrors.UnableToParseServerRuleXML, e.Message);
                result.Success   = false;
                result.ErrorCode = -5000;
                return(result);
            }

            string error;

            if (false == Rule<ServerActionContext>.ValidateRule(
                    theDoc,
                    ServerRulesEngine.GetSpecificationCompiler(),
                    ServerRulesEngine.GetActionCompiler(type),
                    out error))
            {
                result.ErrorText = error;
                result.Success   = false;
                result.ErrorCode = -5000;
            }
            else
            {
                result.Success = true;
            }

            return(result);
        }
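A hedged example of calling ValidateServerRule; 'validator' stands in for whatever class exposes the method, and the rule string is escaped because the method unescapes it before parsing:

            // Illustrative caller only; 'validator' is a placeholder, not a type from the original code.
            string conditionXml =
                "<condition expressionLanguage=\"dicom\">" +
                "<equal test=\"$Modality\" refValue=\"MR\"/>" +
                "</condition>";

            ValidationResult result = validator.ValidateServerRule(
                Microsoft.JScript.GlobalObject.escape(conditionXml),   // the method unescapes before parsing
                ServerRuleTypeEnum.DataAccess.Lookup);                 // DataAccess rules are condition-only; the method adds the no-op action

            if (!result.Success)
                Platform.Log(LogLevel.Error, "Rule validation failed ({0}): {1}", result.ErrorCode, result.ErrorText);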