/// <summary>
        /// Creates a duplicate Study Integrity Queue (SIQ) entry for the specified duplicate SOP instance.
        /// </summary>
        /// <param name="file">The duplicate DICOM file.</param>
        /// <param name="location">The storage location of the existing study.</param>
        /// <param name="sourcePath">Path of the duplicate source file, which is deleted as part of processing.</param>
        /// <param name="queue">The <see cref="WorkQueue"/> item being processed.</param>
        /// <param name="uid">The <see cref="WorkQueueUid"/> associated with the duplicate.</param>
        public static void CreateDuplicateSIQEntry(DicomFile file, StudyStorageLocation location, string sourcePath, WorkQueue queue, WorkQueueUid uid)
        {
            Platform.Log(LogLevel.Info, "Creating Work Queue Entry for duplicate...");
            String uidGroup = queue.GroupID ?? queue.GetKey().Key.ToString();

            using (var commandProcessor = new ServerCommandProcessor("Insert Work Queue entry for duplicate"))
            {
                commandProcessor.AddCommand(new FileDeleteCommand(sourcePath, true));

                var sopProcessingContext     = new SopProcessingContext(commandProcessor, location, uidGroup);
                DicomProcessingResult result = Process(sopProcessingContext, file);
                if (!result.Successful)
                {
                    FailUid(uid, true);
                    return;
                }

                commandProcessor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!commandProcessor.Execute())
                {
                    Platform.Log(LogLevel.Error, "Unexpected error when creating duplicate study integrity queue entry: {0}", commandProcessor.FailureReason);
                    FailUid(uid, true);
                }
            }
        }
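
        // A minimal usage sketch (not from the original source): a WorkQueue item processor that has
        // detected duplicate SOP instances might hand each one off like this. The helpers
        // GetDuplicateSourcePath and LoadDuplicateFile are hypothetical; only CreateDuplicateSIQEntry,
        // WorkQueue, WorkQueueUid and StudyStorageLocation come from the code above.
        private static void ProcessDuplicates(WorkQueue item, StudyStorageLocation location, IList<WorkQueueUid> uids)
        {
            foreach (WorkQueueUid uid in uids)
            {
                // Hypothetical helper: resolve the on-disk path of the received duplicate file.
                string sourcePath = GetDuplicateSourcePath(location, uid);

                // Hypothetical helper: load the duplicate DICOM file from that path.
                DicomFile dupFile = LoadDuplicateFile(sourcePath);

                // On success the source file is deleted and the WorkQueueUid entry removed;
                // on failure the uid is marked as failed (see CreateDuplicateSIQEntry above).
                CreateDuplicateSIQEntry(dupFile, location, sourcePath, item, uid);
            }
        }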
        /// <summary>
        /// Adds commands to save the duplicate <paramref name="file"/> under the partition's reconcile storage folder.
        /// </summary>
        private static void SaveDuplicate(SopProcessingContext context, DicomFile file)
        {
            String sopUid = file.DataSet[DicomTags.SopInstanceUid].ToString();

            String path = Path.Combine(context.StudyLocation.FilesystemPath, context.StudyLocation.PartitionFolder);

            context.CommandProcessor.AddCommand(new CreateDirectoryCommand(path));

            path = Path.Combine(path, ServerPlatform.ReconcileStorageFolder);
            context.CommandProcessor.AddCommand(new CreateDirectoryCommand(path));

            path = Path.Combine(path, context.Group /* the AE title + timestamp */);
            context.CommandProcessor.AddCommand(new CreateDirectoryCommand(path));

            path = Path.Combine(path, context.StudyLocation.StudyInstanceUid);
            context.CommandProcessor.AddCommand(new CreateDirectoryCommand(path));

            path  = Path.Combine(path, sopUid);
            path += "." + ServerPlatform.DuplicateFileExtension;

            context.CommandProcessor.AddCommand(new SaveDicomFileCommand(path, file, true));

            Platform.Log(ServerPlatform.InstanceLogLevel, "Duplicate ==> {0}", path);
        }
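
        // Illustration only (hypothetical values): the CreateDirectoryCommand/SaveDicomFileCommand
        // sequence above produces a duplicate file path of the form
        //
        //   <FilesystemPath>\<PartitionFolder>\<ReconcileStorageFolder>\<Group>\<StudyInstanceUid>\<SopInstanceUid>.<DuplicateFileExtension>
        //
        // e.g. F:\ImageServer\Partition1\Reconcile\AETITLE_20100101120000\1.2.840.1.2\1.2.840.1.2.3.dup,
        // assuming the reconcile storage folder is named "Reconcile" and the duplicate extension is "dup".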
 /// <summary>
 /// Creates an instance of <see cref="ImageReconciler"/>
 /// </summary>
 /// <param name="context"></param>
 public ImageReconciler(SopProcessingContext context)
 {
     _context = context;
 }
        /// <summary>
        /// Logs receipt of a duplicate SOP instance and delegates to <see cref="DuplicateSopProcessorHelper.Process"/>.
        /// </summary>
        private DicomProcessingResult HandleDuplicate(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomFile file)
        {
        	Study study = studyLocation.Study ??
                          studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);
            if (study != null)
                Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (A#:{1} StudyUid:{2}  Patient: {3}  ID:{4})",
                             sopInstanceUid,
                             study.AccessionNumber, study.StudyInstanceUid,
                             study.PatientsName, study.PatientId);
            else
                Platform.Log(LogLevel.Info,
                             "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                             sopInstanceUid, studyLocation.StudyInstanceUid);

            SopProcessingContext sopProcessingContext = new SopProcessingContext(commandProcessor, studyLocation, _context.ContextID);
            DicomProcessingResult result = DuplicateSopProcessorHelper.Process(sopProcessingContext, file);
            return result;
        }
        // TODO: Make these values configurable
      
        #endregion

        #region Public Methods

        /// <summary>
        /// Inserts the duplicate DICOM file into the <see cref="WorkQueue"/> for processing (if applicable).
        /// </summary>
        /// <param name="context">The processing context.</param>
        /// <param name="file">Thje duplicate DICOM file being processed.</param>
        /// <returns>A <see cref="DicomProcessingResult"/> that contains the result of the processing.</returns>
        /// <remarks>
        /// This method inserts <see cref="ServerCommand"/> into <paramref name="context.CommandProcessor"/>.
        /// The outcome of the operation depends on the <see cref="DuplicateSopPolicyEnum"/> of the <see cref="ServerPartition"/>.
        /// If it is set to <see cref="DuplicateSopPolicyEnum.CompareDuplicates"/>, the duplicate file will be
        /// inserted into the <see cref="WorkQueue"/> for processing.
        /// </remarks>
        public static DicomProcessingResult Process(SopProcessingContext context, DicomFile file)
        {
            Platform.CheckForNullReference(file, "file");
            Platform.CheckForNullReference(context, "context");
            Platform.CheckMemberIsSet(context.Group, "context.Group");
            Platform.CheckMemberIsSet(context.CommandProcessor, "context.CommandProcessor");
            Platform.CheckMemberIsSet(context.StudyLocation, "context.StudyLocation");

            var result = new DicomProcessingResult
                             {
                                 DicomStatus = DicomStatuses.Success,
                                 Successful = true,
                                 StudyInstanceUid = file.DataSet[DicomTags.StudyInstanceUid].GetString(0, string.Empty),
                                 SeriesInstanceUid = file.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty),
                                 SopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty),
                                 SopClassUid = file.DataSet[DicomTags.SopClassUid].GetString(0, string.Empty),
                                 AccessionNumber = file.DataSet[DicomTags.AccessionNumber].GetString(0, string.Empty)
                             };

            string failureMessage;

            if (SopClassIsReport(result.SopClassUid) && context.StudyLocation.ServerPartition.AcceptLatestReport)
            {
                Platform.Log(LogLevel.Info, "Duplicate Report received, overwriting {0}", result.SopInstanceUid);
                SaveDuplicate(context, file);
                context.CommandProcessor.AddCommand(
                    new UpdateWorkQueueCommand(file, context.StudyLocation, true, ServerPlatform.DuplicateFileExtension, context.Group));
                return result;
            }

            if (DuplicatePolicy.IsParitionDuplicatePolicyOverridden(context.StudyLocation))
            {
                Platform.Log(LogLevel.Warn, "Duplicate instance received for study {0} on Partition {1}. Duplicate policy overridden. Will overwrite {2}", 
                                result.StudyInstanceUid, context.StudyLocation.ServerPartition.AeTitle, result.SopInstanceUid);
                SaveDuplicate(context, file);
                context.CommandProcessor.AddCommand(new UpdateWorkQueueCommand(file, context.StudyLocation, true, ServerPlatform.DuplicateFileExtension, context.Group));
                return result;
            }
            else
            {
                if (context.StudyLocation.ServerPartition.DuplicateSopPolicyEnum.Equals(DuplicateSopPolicyEnum.SendSuccess))
                {
                    Platform.Log(LogLevel.Info, "Duplicate SOP Instance received, sending success response {0}", result.SopInstanceUid);
                    return result;
                }
                if (context.StudyLocation.ServerPartition.DuplicateSopPolicyEnum.Equals(DuplicateSopPolicyEnum.RejectDuplicates))
                {
                    failureMessage = String.Format("Duplicate SOP Instance received, rejecting {0}", result.SopInstanceUid);
                    Platform.Log(LogLevel.Info, failureMessage);
                    result.SetError(DicomStatuses.DuplicateSOPInstance, failureMessage);
                    return result;
                }

                if (context.StudyLocation.ServerPartition.DuplicateSopPolicyEnum.Equals(DuplicateSopPolicyEnum.CompareDuplicates))
                {
                    SaveDuplicate(context, file);
                    context.CommandProcessor.AddCommand(
                        new UpdateWorkQueueCommand(file, context.StudyLocation, true, ServerPlatform.DuplicateFileExtension, context.Group));
                }
                else
                {
                    failureMessage = String.Format("Duplicate SOP Instance received. Unsupported duplicate policy {0}.", context.StudyLocation.ServerPartition.DuplicateSopPolicyEnum);
                    result.SetError(DicomStatuses.DuplicateSOPInstance, failureMessage);
                    return result;
                }
            }

            return result;
        }
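
        // A minimal caller sketch (not part of the original source) showing how a receiving service
        // might drive Process inside its own ServerCommandProcessor and react to the result. The
        // surrounding method and variable names are assumptions; DicomStatuses.ProcessingFailure is
        // assumed to be available in the toolkit. Note that Process only queues commands; nothing
        // touches the filesystem or database until the processor is executed.
        private static DicomProcessingResult ProcessDuplicateSop(StudyStorageLocation location, DicomFile file, string groupId)
        {
            using (var processor = new ServerCommandProcessor("Handle duplicate SOP"))
            {
                var context = new SopProcessingContext(processor, location, groupId);
                DicomProcessingResult result = Process(context, file);

                if (result.Successful && !processor.Execute())
                    result.SetError(DicomStatuses.ProcessingFailure, processor.FailureReason);

                return result;
            }
        }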
		/// <summary>
		/// Schedules a reconciliation for the specified <see cref="DicomFile"/>
		/// </summary>
		/// <param name="context"></param>
		/// <param name="file"></param>
		/// <param name="uid"></param>
		private static void ScheduleReconcile(SopProcessingContext context, DicomFile file, WorkQueueUid uid)
		{
			ImageReconciler reconciler = new ImageReconciler(context);
			reconciler.ScheduleReconcile(file, StudyIntegrityReasonEnum.InconsistentData, uid);
		}
		/// <summary>
		/// Process a specific DICOM file related to a <see cref="WorkQueue"/> request.
		/// </summary>
		/// <remarks>
		/// <para>
		/// On success and if <paramref name="uid"/> is set, the corresponding <see cref="WorkQueueUid"/> entry is deleted.
		/// </para>
		/// </remarks>
		/// <param name="stream">The <see cref="StudyXml"/> file to update with information from the file.</param>
		/// <param name="group">A group the sop is associated with.</param>
		/// <param name="file">The file to process.</param>
		/// <param name="compare">Flag to compare the demographics of <see cref="file"/> with the demographics in the database</param>
		/// <param name="retry">Flag telling if the item should be retried on failure.  Note that if the item is a duplicate, the WorkQueueUid item is not failed. </param>
		/// <param name="uid">An optional WorkQueueUid associated with the entry, that will be deleted upon success or failed on failure.</param>
		/// <param name="deleteFile">An option file to delete as part of the process</param>
        /// <exception cref="Exception"/>
        /// <exception cref="DicomDataException"/>
        public ProcessingResult ProcessFile(string group, DicomFile file, StudyXml stream, bool compare, bool retry, WorkQueueUid uid, string deleteFile)
		{
		    Platform.CheckForNullReference(file, "file");

            try
            {
                CheckDataLength(file);

                _instanceStats.ProcessTime.Start();
                ProcessingResult result = new ProcessingResult
                                              {
                                                  Status = ProcessingStatus.Success
                                              };

                using (ServerCommandProcessor processor = new ServerCommandProcessor("Process File"))
                {
                    SopProcessingContext processingContext = new SopProcessingContext(processor,
                                                                                      _context.StorageLocation, group);

                    if (EnforceNameRules)
                    {
                        _patientNameRules.Apply(file);
                    }

                    if (compare && ShouldReconcile(_context.StorageLocation, file))
                    {
                        ScheduleReconcile(processingContext, file, uid);
                        result.Status = ProcessingStatus.Reconciled;
                    }
                    else
                    {
                        InsertInstance(file, stream, uid, deleteFile);
                        result.Status = ProcessingStatus.Success;
                    }
                }

                _instanceStats.ProcessTime.End();

                if (_context.SopProcessedRulesEngine.Statistics.LoadTime.IsSet)
                    _instanceStats.SopRulesLoadTime.Add(_context.SopProcessedRulesEngine.Statistics.LoadTime);

                if (_context.SopProcessedRulesEngine.Statistics.ExecutionTime.IsSet)
                    _instanceStats.SopEngineExecutionTime.Add(_context.SopProcessedRulesEngine.Statistics.ExecutionTime);

                _context.SopProcessedRulesEngine.Statistics.Reset();

                // TODO: Should an exception be thrown if the result indicates failure?
                return result;

            }
            catch (Exception e)
            {
                // If it's a duplicate, don't fail the WorkQueueUid; just rethrow the exception
                if (deleteFile != null && (e is InstanceAlreadyExistsException
                        || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException))
                    throw;

                if (uid != null)
                    FailUid(uid, retry);
                throw;
            }
		}
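
		// A minimal usage sketch (assumption, not part of the original source): a study processor
		// might call ProcessFile once per received SOP instance, loading the file and study XML first.
		// LoadStudyXml is a hypothetical helper; the DicomFile loading calls follow the ClearCanvas API in spirit.
		public void ProcessInstance(string group, string path, WorkQueueUid uid)
		{
			var file = new DicomFile(path);
			file.Load();                                                 // read the DICOM file from disk

			StudyXml studyXml = LoadStudyXml(_context.StorageLocation);  // hypothetical helper

			// Compare demographics against the database, allow retry on failure, delete no extra file.
			ProcessingResult result = ProcessFile(group, file, studyXml, true, true, uid, null);

			if (result.Status == ProcessingStatus.Reconciled)
				Platform.Log(LogLevel.Info, "SOP instance in {0} was scheduled for reconciliation instead of insertion", path);
		}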