private ProcessDuplicateResult OverwriteDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

            var result = new ProcessDuplicateResult();

            result.ActionTaken = DuplicateProcessResultAction.Accept;

            using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
            {
                var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
                processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

                // Ideally we don't need to insert the instance into the database since it's a duplicate.
                // However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
                // For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
                // But it will be corrected at the end of the processing when the study verification is executed.
                processor.AddCommand(new InsertInstanceCommand(dupFile, Context.StorageLocation));

                // Update the StudyStream object
                processor.AddCommand(new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!processor.Execute())
                {
                    // cause the item to fail
                    throw new Exception("Error occurred when trying to overwrite the duplicate in the filesystem.", processor.FailureException);
                }
            }

            return result;
        }
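
A minimal usage sketch of the command-processor pattern used above (the method name and file paths are hypothetical placeholders): commands execute in order, and if any fails, Execute() returns false and the processor rolls back the commands that already ran.

        private void ExampleCommandBatch(string sourceFile, string destinationFile, WorkQueueUid uid)
        {
            using (var processor = new ServerCommandProcessor("Example duplicate cleanup"))
            {
                // Queue the filesystem move and the work-queue cleanup as one batch.
                processor.AddCommand(new RenameFileCommand(sourceFile, destinationFile, false));
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!processor.Execute())
                    Platform.Log(LogLevel.Error, "Command batch failed: {0}", processor.FailureReason);
            }
        }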
        private ProcessDuplicateResult ProcessDuplicateReport(DicomFile dupFile, DicomFile baseFile, WorkQueueUid uid, StudyXml studyXml)
        {
            var result = new ProcessDuplicateResult();

            DateTime? dupTime = DateTimeParser.ParseDateAndTime(dupFile.DataSet, 0, DicomTags.InstanceCreationDate,
                                                                DicomTags.InstanceCreationTime);

            DateTime? baseTime = DateTimeParser.ParseDateAndTime(baseFile.DataSet, 0, DicomTags.InstanceCreationDate,
                                                                 DicomTags.InstanceCreationTime);

            if (dupTime.HasValue && baseTime.HasValue)
            {
                if (dupTime.Value <= baseTime.Value)
                {
                    RemoveWorkQueueUid(uid, dupFile.Filename);
                    result.ActionTaken = DuplicateProcessResultAction.Delete;
                    return result;
                }
            }

            result.ActionTaken = DuplicateProcessResultAction.Accept;
            SaveDuplicateReport(uid, dupFile.Filename, baseFile.Filename, dupFile, studyXml);

            return result;
        }
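
The method above implements a "keep the newest report" rule. Summarized as a decision table (derived directly from the code, not from separate documentation):

        // dupTime <= baseTime      -> Delete  (the existing report is kept)
        // dupTime >  baseTime      -> Accept  (SaveDuplicateReport stores the newer report)
        // either timestamp missing -> Accept  (falls through to SaveDuplicateReport)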
        /// <summary>
        /// Overwrites the existing copy with the received duplicate, updates the database and study XML, and applies any SOP rules.
        /// </summary>
        /// <param name="dupFile"></param>
        /// <param name="uid"></param>
        /// <param name="studyXml"></param>
        /// <returns></returns>
        private ProcessDuplicateResult OverwriteAndUpdateDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

            var result = new ProcessDuplicateResult();

            result.ActionTaken = DuplicateProcessResultAction.Accept;

            using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
            {
                var sopContext = new ServerActionContext(dupFile, Context.StorageLocation.FilesystemKey, Context.StorageLocation.ServerPartition, Context.StorageLocation.Key, processor);

                var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
                processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

                // Set the filename so that the FileSize calculation in InsertStudyXmlCommand works
                dupFile.Filename = destination;

                // Update the StudyStream object
                var insertStudyXmlCommand = new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation);
                processor.AddCommand(insertStudyXmlCommand);

                // Ideally we don't need to insert the instance into the database since it's a duplicate.
                // However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
                // For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
                // But it will be corrected at the end of the processing when the study verification is executed.
                processor.AddCommand(new UpdateInstanceCommand(Context.StorageLocation.ServerPartition, Context.StorageLocation, dupFile));

                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                processor.AddCommand(new ApplySopRulesCommand(sopContext, Context.SopProcessedRulesEngine));

                if (!processor.Execute())
                {
                    EventManager.FireEvent(this, new FailedUpdateSopEventArgs
                    {
                        File = dupFile,
                        ServerPartitionEntry = Context.StorageLocation.ServerPartition,
                        WorkQueueUidEntry = uid,
                        WorkQueueEntry = WorkQueueItem,
                        FileLength = (ulong)insertStudyXmlCommand.FileSize,
                        FailureMessage = processor.FailureReason
                    });

                    // cause the item to fail
                    throw new Exception("Error occurred when trying to overwrite the duplicate in the filesystem.", processor.FailureException);
                }

                EventManager.FireEvent(this, new UpdateSopEventArgs
                {
                    File = dupFile,
                    ServerPartitionEntry = Context.StorageLocation.ServerPartition,
                    WorkQueueUidEntry = uid,
                    WorkQueueEntry = WorkQueueItem,
                    FileLength = (ulong)insertStudyXmlCommand.FileSize
                });
            }

            return result;
        }
        private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            var result = new ProcessDuplicateResult();

            var data = uid.SerializeWorkQueueUidData;

            var basePath = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (!File.Exists(basePath))
            {
                // NOTE: This is a special case. The file which caused the DICOM service to flag this SOP as a duplicate
                // no longer exists in the study folder. Perhaps it was moved to another folder during auto reconciliation.
                // We have nothing to compare against, so just throw it into the SIQ.
                CreateDuplicateSIQEntry(uid, dupFile, null);
                result.ActionTaken = DuplicateProcessResultAction.Reconcile;
            }
            else
            {
                var duplicateEnum = data.DuplicateProcessing ?? DuplicateProcessingEnum.Compare;

                switch (duplicateEnum)
                {
                case DuplicateProcessingEnum.OverwriteSop:
                    // Note: There's actually no difference between OverwriteDuplicate and OverwriteAndUpdateDuplicate. But decided to leave it as is.
                    return OverwriteDuplicate(dupFile, uid, studyXml);

                case DuplicateProcessingEnum.OverwriteSopAndUpdateDatabase:
                    return OverwriteAndUpdateDuplicate(dupFile, uid, studyXml);

                case DuplicateProcessingEnum.OverwriteReport:
                    var file = new DicomFile(basePath);
                    file.Load(); // load the existing copy before parsing its creation time
                    return ProcessDuplicateReport(dupFile, file, uid, studyXml);

                case DuplicateProcessingEnum.Compare:
                    var baseFile = new DicomFile(basePath);
                    baseFile.Load(); // load the existing copy before comparing data sets
                    return CompareDuplicates(dupFile, baseFile, uid);

                default:
                    throw new InvalidOperationException(String.Format("Unexpected duplicate processing action: {0}", duplicateEnum));
                }
            }

            return result;
        }
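
A hypothetical call-site sketch for the dispatcher above, assuming dupFile has already been loaded from the duplicate folder:

        private void ExampleDispatch(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            ProcessDuplicateResult result = ProcessDuplicate(dupFile, uid, studyXml);
            if (result.ActionTaken == DuplicateProcessResultAction.Reconcile)
                Platform.Log(LogLevel.Info, "Duplicate SOP {0} queued for reconciliation", uid.SopInstanceUid);
        }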
		private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
		{
			var result = new ProcessDuplicateResult();

			var data = uid.SerializeWorkQueueUidData;

			string duplicateSopPath = ServerHelper.GetDuplicateUidPath(StorageLocation, uid);
			string basePath = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
			if (!File.Exists(basePath))
			{
				// NOTE: This is a special case. The file which caused the DICOM service to flag this SOP as a duplicate
				// no longer exists in the study folder. Perhaps it was moved to another folder during auto reconciliation.
				// We have nothing to compare against, so just throw it into the SIQ.
				CreateDuplicateSIQEntry(uid, dupFile, null);
				result.ActionTaken = DuplicateProcessResultAction.Reconcile;
			}
			else
			{
				var duplicateEnum = data.DuplicateProcessing ?? DuplicateProcessingEnum.Compare;

				// Overwrite the existing SOP without updating the database
				if (duplicateEnum == DuplicateProcessingEnum.OverwriteSop)
				{
					return OverwriteDuplicate(dupFile, uid, studyXml);
				}

				// Overwrite the existing SOP and also update the database record
				if (duplicateEnum == DuplicateProcessingEnum.OverwriteSopAndUpdateDatabase)
				{
					return OverwriteAndUpdateDuplicate(dupFile, uid, studyXml);
				}

				var baseFile = new DicomFile(basePath);
				baseFile.Load();

				if (duplicateEnum == DuplicateProcessingEnum.OverwriteReport)
				{
					return ProcessDuplicateReport(dupFile, baseFile, uid, studyXml);
				}

				// DuplicateProcessingEnum.Compare
				if (!dupFile.TransferSyntax.Equals(baseFile.TransferSyntax))
				{
					// If they're compressed, and we have a codec, let's decompress and still do the comparison
					if (dupFile.TransferSyntax.Encapsulated
					    && !dupFile.TransferSyntax.LossyCompressed
					    && DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null)
					{
						dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
					}

					if (baseFile.TransferSyntax.Encapsulated
					    && !baseFile.TransferSyntax.LossyCompressed
					    && DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null)
					{
						baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
					}

					if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
					{
						string failure = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
						                               baseFile.TransferSyntax, dupFile.TransferSyntax);

						var list = new List<DicomAttributeComparisonResult>();
						var compareResult = new DicomAttributeComparisonResult
							{
								ResultType = ComparisonResultType.DifferentValues,
								TagName = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
								Details = failure
							};
						list.Add(compareResult);
						CreateDuplicateSIQEntry(uid, dupFile, list);
						result.ActionTaken = DuplicateProcessResultAction.Reconcile;
						return result;
					}
				}

				var failureReason = new List<DicomAttributeComparisonResult>();
				if (baseFile.DataSet.Equals(dupFile.DataSet, ref failureReason))
				{
					Platform.Log(LogLevel.Info,
					             "Duplicate SOP being processed is identical.  Removing SOP: {0}",
					             baseFile.MediaStorageSopInstanceUid);


					RemoveWorkQueueUid(uid, duplicateSopPath);
					result.ActionTaken = DuplicateProcessResultAction.Delete;

				}
				else
				{
					CreateDuplicateSIQEntry(uid, dupFile, failureReason);
					result.ActionTaken = DuplicateProcessResultAction.Reconcile;
				}
			}

			return result;
		}
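
When the data sets differ, the failureReason list describes each mismatched attribute. A small sketch (hypothetical helper, logging only) of how those results can be inspected:

		private static void LogComparisonResults(List<DicomAttributeComparisonResult> results)
		{
			// Each result carries the mismatch type, the offending tag name, and a detail string.
			foreach (DicomAttributeComparisonResult r in results)
				Platform.Log(LogLevel.Info, "{0}: {1} - {2}", r.ResultType, r.TagName, r.Details);
		}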
		private ProcessDuplicateResult OverwriteAndUpdateDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
		{
			Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

			var result = new ProcessDuplicateResult();
			result.ActionTaken = DuplicateProcessResultAction.Accept;

			using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
			{
				var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
				processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

				// Set the filename so that the FileSize calculation in InsertStudyXmlCommand works
				dupFile.Filename = destination;

				// Update the StudyStream object
				var insertStudyXmlCommand = new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation);
				processor.AddCommand(insertStudyXmlCommand);

				// Ideally we don't need to insert the instance into the database since it's a duplicate.
				// However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
				// For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
				// But it will be corrected at the end of the processing when the study verification is executed.
				processor.AddCommand(new UpdateInstanceCommand(Context.StorageLocation.ServerPartition, Context.StorageLocation, dupFile));

				processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

				if (!processor.Execute())
				{
					EventManager.FireEvent(this, new FailedUpdateSopEventArgs
					{
						File = dupFile,
						ServerPartitionEntry = Context.StorageLocation.ServerPartition,
						WorkQueueUidEntry = uid,
						WorkQueueEntry = WorkQueueItem,
						FileLength = (ulong)insertStudyXmlCommand.FileSize,
						FailureMessage = processor.FailureReason
					});

					// cause the item to fail
					throw new Exception("Error occurred when trying to overwrite the duplicate in the filesystem.", processor.FailureException);
				}

				EventManager.FireEvent(this, new UpdateSopEventArgs
				{
					File = dupFile,
					ServerPartitionEntry = Context.StorageLocation.ServerPartition,
					WorkQueueUidEntry = uid,
					WorkQueueEntry = WorkQueueItem,
					FileLength = (ulong)insertStudyXmlCommand.FileSize
				});
			}

			return result;
		}
        private ProcessDuplicateResult ProcessDuplicateReport(DicomFile dupFile, DicomFile baseFile, WorkQueueUid uid, StudyXml studyXml)
        {
            var result = new ProcessDuplicateResult();

            DateTime? dupTime = DateTimeParser.ParseDateAndTime(dupFile.DataSet, 0, DicomTags.InstanceCreationDate,
                                                                DicomTags.InstanceCreationTime);

            DateTime? baseTime = DateTimeParser.ParseDateAndTime(baseFile.DataSet, 0, DicomTags.InstanceCreationDate,
                                                                 DicomTags.InstanceCreationTime);

            if (dupTime.HasValue && baseTime.HasValue)
            {
                if (dupTime.Value <= baseTime.Value)
                {
                    RemoveWorkQueueUid(uid, dupFile.Filename);
                    result.ActionTaken = DuplicateProcessResultAction.Delete;
                    return result;
                }
            }

            result.ActionTaken = DuplicateProcessResultAction.Accept;
            SaveDuplicateReport(uid, dupFile.Filename, baseFile.Filename, dupFile, studyXml);
            return result;
        }
        private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            var result = new ProcessDuplicateResult();


            string duplicateSopPath = ServerPlatform.GetDuplicateUidPath(StorageLocation, uid);
            string basePath         = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (!File.Exists(basePath))
            {
                // NOTE: This is a special case. The file which caused the DICOM service to flag this SOP as a duplicate
                // no longer exists in the study folder. Perhaps it was moved to another folder during auto reconciliation.
                // We have nothing to compare against, so just throw it into the SIQ.
                CreateDuplicateSIQEntry(uid, dupFile, null);
                result.ActionTaken = DuplicateProcessResultAction.Reconcile;
            }
            else
            {
                // Check if system is configured to override the rule for this study
                if (DuplicatePolicy.IsParitionDuplicatePolicyOverridden(this.StorageLocation))
                {
                    return OverwriteDuplicate(dupFile, uid, studyXml);
                }
                else
                {
                    var baseFile = new DicomFile(basePath);
                    baseFile.Load();

                    if (DuplicateSopProcessorHelper.SopClassIsReport(dupFile.SopClass.Uid) && ServerPartition.AcceptLatestReport)
                    {
                        return ProcessDuplicateReport(dupFile, baseFile, uid, studyXml);
                    }


                    if (!dupFile.TransferSyntax.Equals(baseFile.TransferSyntax))
                    {
                        // If they're compressed, and we have a codec, let's decompress and still do the comparison
                        if (dupFile.TransferSyntax.Encapsulated &&
                            !dupFile.TransferSyntax.LossyCompressed &&
                            DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null)
                        {
                            dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
                        }

                        if (baseFile.TransferSyntax.Encapsulated &&
                            !baseFile.TransferSyntax.LossyCompressed &&
                            DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null)
                        {
                            baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
                        }

                        if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
                        {
                            string failure = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
                                                           baseFile.TransferSyntax, dupFile.TransferSyntax);

                            var list          = new List<DicomAttributeComparisonResult>();
                            var compareResult = new DicomAttributeComparisonResult
                            {
                                ResultType = ComparisonResultType.DifferentValues,
                                TagName    = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
                                Details    = failure
                            };
                            list.Add(compareResult);
                            CreateDuplicateSIQEntry(uid, dupFile, list);
                            result.ActionTaken = DuplicateProcessResultAction.Reconcile;
                            return result;
                        }
                    }

                    var failureReason = new List<DicomAttributeComparisonResult>();
                    if (baseFile.DataSet.Equals(dupFile.DataSet, ref failureReason))
                    {
                        Platform.Log(LogLevel.Info,
                                     "Duplicate SOP being processed is identical.  Removing SOP: {0}",
                                     baseFile.MediaStorageSopInstanceUid);


                        RemoveWorkQueueUid(uid, duplicateSopPath);
                        result.ActionTaken = DuplicateProcessResultAction.Delete;
                    }
                    else
                    {
                        CreateDuplicateSIQEntry(uid, dupFile, failureReason);
                        result.ActionTaken = DuplicateProcessResultAction.Reconcile;
                    }
                }
            }

            return result;
        }
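
A small sketch of the report check used above. The UID below is the DICOM standard Basic Text SR Storage SOP class; that SopClassIsReport treats it as a report is an assumption for illustration:

            // Basic Text SR Storage - a structured report SOP class (hypothetical example input)
            const string BasicTextSrUid = "1.2.840.10008.5.1.4.1.1.88.11";
            bool isReport = DuplicateSopProcessorHelper.SopClassIsReport(BasicTextSrUid);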
        /// <summary>
        /// Compares the received duplicate with the existing copy in the filesystem and throws it into the SIQ if they differ.
        /// Otherwise, simply deletes the duplicate and keeps everything as is.
        /// </summary>
        /// <param name="dupFile"></param>
        /// <param name="baseFile"></param>
        /// <param name="uid"></param>
        /// <returns></returns>
        private ProcessDuplicateResult CompareDuplicates(DicomFile dupFile, DicomFile baseFile, WorkQueueUid uid)
        {
            var    result           = new ProcessDuplicateResult();
            string duplicateSopPath = ServerHelper.GetDuplicateUidPath(StorageLocation, uid);

            if (!dupFile.TransferSyntax.Equals(baseFile.TransferSyntax))
            {
                // If they're compressed, and we have a codec, let's decompress and still do the comparison
                if (dupFile.TransferSyntax.Encapsulated &&
                    !dupFile.TransferSyntax.LossyCompressed &&
                    DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null)
                {
                    dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
                }

                if (baseFile.TransferSyntax.Encapsulated &&
                    !baseFile.TransferSyntax.LossyCompressed &&
                    DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null)
                {
                    baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
                }

                if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
                {
                    string failure = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
                                                   baseFile.TransferSyntax, dupFile.TransferSyntax);

                    var list          = new List<DicomAttributeComparisonResult>();
                    var compareResult = new DicomAttributeComparisonResult
                    {
                        ResultType = ComparisonResultType.DifferentValues,
                        TagName    = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
                        Details    = failure
                    };
                    list.Add(compareResult);
                    CreateDuplicateSIQEntry(uid, dupFile, list);
                    result.ActionTaken = DuplicateProcessResultAction.Reconcile;
                    return result;
                }
            }

            var failureReason = new List<DicomAttributeComparisonResult>();

            if (baseFile.DataSet.Equals(dupFile.DataSet, ref failureReason))
            {
                Platform.Log(LogLevel.Info,
                             "Duplicate SOP being processed is identical.  Removing SOP: {0}",
                             baseFile.MediaStorageSopInstanceUid);


                RemoveWorkQueueUid(uid, duplicateSopPath);
                result.ActionTaken = DuplicateProcessResultAction.Delete;
            }
            else
            {
                CreateDuplicateSIQEntry(uid, dupFile, failureReason);
                result.ActionTaken = DuplicateProcessResultAction.Reconcile;
            }

            return result;
        }
		/// <summary>
		/// Compares the received duplicate with the existing copy in the filesystem and throws it into the SIQ if they differ.
		/// Otherwise, simply deletes the duplicate and keeps everything as is.
		/// </summary>
		/// <param name="dupFile"></param>
		/// <param name="baseFile"></param>
		/// <param name="uid"></param>
		/// <returns></returns>
		private ProcessDuplicateResult CompareDuplicates(DicomFile dupFile, DicomFile baseFile, WorkQueueUid uid)
		{
			var result = new ProcessDuplicateResult();
			string duplicateSopPath = ServerHelper.GetDuplicateUidPath(StorageLocation, uid);

			if (!dupFile.TransferSyntax.Equals(baseFile.TransferSyntax))
			{
				// If they're compressed, and we have a codec, let's decompress and still do the comparison
				if (dupFile.TransferSyntax.Encapsulated
				    && !dupFile.TransferSyntax.LossyCompressed
				    && DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null)
				{
					dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
				}

				if (baseFile.TransferSyntax.Encapsulated
				    && !baseFile.TransferSyntax.LossyCompressed
				    && DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null)
				{
					baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
				}

				if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
				{
					string failure = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
					                               baseFile.TransferSyntax, dupFile.TransferSyntax);

					var list = new List<DicomAttributeComparisonResult>();
					var compareResult = new DicomAttributeComparisonResult
					{
						ResultType = ComparisonResultType.DifferentValues,
						TagName = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
						Details = failure
					};
					list.Add(compareResult);
					CreateDuplicateSIQEntry(uid, dupFile, list);
					result.ActionTaken = DuplicateProcessResultAction.Reconcile;
					return result;
				}
			}

			var failureReason = new List<DicomAttributeComparisonResult>();
			if (baseFile.DataSet.Equals(dupFile.DataSet, ref failureReason))
			{
				Platform.Log(LogLevel.Info,
				             "Duplicate SOP being processed is identical.  Removing SOP: {0}",
				             baseFile.MediaStorageSopInstanceUid);

				RemoveWorkQueueUid(uid, duplicateSopPath);
				result.ActionTaken = DuplicateProcessResultAction.Delete;
			}
			else
			{
				CreateDuplicateSIQEntry(uid, dupFile, failureReason);
				result.ActionTaken = DuplicateProcessResultAction.Reconcile;
			}

			return result;
		}
		private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
		{
			var result = new ProcessDuplicateResult();

			var data = uid.SerializeWorkQueueUidData;

			var basePath = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
			if (!File.Exists(basePath))
			{
				// NOTE: This is a special case. The file which caused the DICOM service to flag this SOP as a duplicate
				// no longer exists in the study folder. Perhaps it was moved to another folder during auto reconciliation.
				// We have nothing to compare against, so just throw it into the SIQ.
				CreateDuplicateSIQEntry(uid, dupFile, null);
				result.ActionTaken = DuplicateProcessResultAction.Reconcile;
			}
			else
			{
				var duplicateEnum = data.DuplicateProcessing ?? DuplicateProcessingEnum.Compare;

				switch (duplicateEnum)
				{
					case DuplicateProcessingEnum.OverwriteSop:
						// Note: There's actually no difference between OverwriteDuplicate and OverwriteAndUpdateDuplicate. But decided to leave it as is.
						return OverwriteDuplicate(dupFile, uid, studyXml);

					case DuplicateProcessingEnum.OverwriteSopAndUpdateDatabase:
						return OverwriteAndUpdateDuplicate(dupFile, uid, studyXml);

					case DuplicateProcessingEnum.OverwriteReport:
						var file = new DicomFile(basePath);
						file.Load(); // load the existing copy before parsing its creation time
						return ProcessDuplicateReport(dupFile, file, uid, studyXml);

					case DuplicateProcessingEnum.Compare:
						var baseFile = new DicomFile(basePath);
						baseFile.Load(); // load the existing copy before comparing data sets
						return CompareDuplicates(dupFile, baseFile, uid);

					default:
						throw new InvalidOperationException(String.Format("Unexpected duplicate processing action: {0}", duplicateEnum));
				}
				
			}

			return result;
		}
        private ProcessDuplicateResult OverwriteDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            Platform.Log(LogLevel.Info, "Overwriting duplicate SOP {0}", uid.SopInstanceUid);

            var result = new ProcessDuplicateResult();
            result.ActionTaken = DuplicateProcessResultAction.Accept;

            using (var processor = new ServerCommandProcessor("Overwrite duplicate instance"))
            {
                var destination = Context.StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
                processor.AddCommand(new RenameFileCommand(dupFile.Filename, destination, false));

                // Ideally we don't need to insert the instance into the database since it's a duplicate.
                // However, we need to do so to ensure the Study record is recreated if we are dealing with an orphan study.
                // For other cases, this will cause the instance count in the DB to be out of sync with the filesystem.
                // But it will be corrected at the end of the processing when the study verification is executed.
                processor.AddCommand(new InsertInstanceCommand(dupFile, Context.StorageLocation));
                
                // Update the StudyStream object
                processor.AddCommand(new InsertStudyXmlCommand(dupFile, studyXml, Context.StorageLocation));
                
                processor.AddCommand(new DeleteWorkQueueUidCommand(uid));

                if (!processor.Execute())
                {
                    // cause the item to fail
                    throw new Exception("Error occurred when trying to overwrite the duplicate in the filesystem.", processor.FailureException);
                }
            }

            return result;
        }
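
Note the contrast with the OverwriteAndUpdateDuplicate variants above: this version inserts a new instance record (InsertInstanceCommand) rather than updating an existing one (UpdateInstanceCommand), updates the study XML after the database command instead of before it, and fires no SOP update events.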