/// <summary>
/// Check if the contents of the DicomAttributeCollection is identical to another DicomAttributeCollection instance.
/// </summary>
/// <remarks>
/// <para>
/// This method compares the contents of two attribute collections to see if they are equal. The method
/// will step through each of the tags within the collection, and compare them to see if they are equal. The
/// method will also recurse into sequence attributes to be sure they are equal.</para>
/// </remarks>
/// <param name="obj">The object to compare to. May be null; a null or non-<see cref="DicomFile"/> value is reported as an invalid-type mismatch.</param>
/// <param name="comparisonResults">A list of <see cref="DicomAttributeComparisonResult"/> describing why the objects are not equal. Created if null.</param>
/// <returns>true if the collections are equal.</returns>
public bool Equals(object obj, ref List<DicomAttributeComparisonResult> comparisonResults)
{
    // Ensure we always have a list to record mismatch details in.
    if (comparisonResults == null)
        comparisonResults = new List<DicomAttributeComparisonResult>();

    var a = obj as DicomFile;
    if (a == null)
    {
        var result = new DicomAttributeComparisonResult
        {
            ResultType = ComparisonResultType.InvalidType,
            // Guard the GetType() call: obj may be null here ("null as DicomFile"
            // is null), and obj.GetType() would throw NullReferenceException.
            Details = String.Format("Comparison object is invalid type: {0}",
                                    obj == null ? "(null)" : obj.GetType().ToString())
        };
        comparisonResults.Add(result);
        return false;
    }

    // Compare the file meta information first, then the main data set;
    // both comparisons append their own details to comparisonResults.
    if (!MetaInfo.Equals(a.MetaInfo, ref comparisonResults))
    {
        return false;
    }

    if (!DataSet.Equals(a.DataSet, ref comparisonResults))
    {
        return false;
    }

    return true;
}
/// <summary>
/// Processes a received duplicate SOP instance according to the duplicate policy
/// recorded in the <see cref="WorkQueueUid"/> (overwrite, overwrite+update DB,
/// overwrite report, or compare-and-reconcile).
/// </summary>
/// <param name="dupFile">The duplicate DICOM file that was received.</param>
/// <param name="uid">The work queue uid entry describing the duplicate.</param>
/// <param name="studyXml">The study XML for the study being processed.</param>
/// <returns>A <see cref="ProcessDuplicateResult"/> describing the action taken.</returns>
private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
{
    var result = new ProcessDuplicateResult();
    var data = uid.SerializeWorkQueueUidData;

    string basePath = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);
    if (!File.Exists(basePath))
    {
        // NOTE: This is special case. The file which caused dicom service to think this sop is a duplicate
        // no longer exists in the study folder. Perhaps it has been moved to another folder during auto reconciliation.
        // We have nothing to compare against so let's just throw it into the SIQ queue.
        CreateDuplicateSIQEntry(uid, dupFile, null);
        result.ActionTaken = DuplicateProcessResultAction.Reconcile;
        return result;
    }

    // Default to Compare when no explicit policy was recorded for this uid.
    var duplicateEnum = data.DuplicateProcessing ?? DuplicateProcessingEnum.Compare;

    // Check if system is configured to override the rule for this study.
    if (duplicateEnum == DuplicateProcessingEnum.OverwriteSop)
    {
        return OverwriteDuplicate(dupFile, uid, studyXml);
    }

    if (duplicateEnum == DuplicateProcessingEnum.OverwriteSopAndUpdateDatabase)
    {
        return OverwriteAndUpdateDuplicate(dupFile, uid, studyXml);
    }

    var baseFile = new DicomFile(basePath);
    baseFile.Load();

    if (duplicateEnum == DuplicateProcessingEnum.OverwriteReport)
    {
        return ProcessDuplicateReport(dupFile, baseFile, uid, studyXml);
    }

    // DuplicateProcessingEnum.Compare: delegate to the shared comparison routine
    // rather than duplicating its transfer-syntax/data-set comparison logic inline.
    return CompareDuplicates(dupFile, baseFile, uid);
}
/// <summary>
/// Check if the contents of the DicomAttributeCollection is identical to another DicomAttributeCollection instance.
/// </summary>
/// <remarks>
/// <para>
/// This method compares the contents of two attribute collections to see if they are equal. The method
/// will step through each of the tags within the collection, and compare them to see if they are equal. The
/// method will also recurse into sequence attributes to be sure they are equal.</para>
/// </remarks>
/// <param name="obj">The object to compare to. May be null; a null or non-<see cref="DicomFile"/> value is reported as an invalid-type mismatch.</param>
/// <param name="comparisonResults">A list of <see cref="DicomAttributeComparisonResult"/> describing why the objects are not equal. Created if null.</param>
/// <returns>true if the collections are equal.</returns>
public bool Equals(object obj, ref List<DicomAttributeComparisonResult> comparisonResults)
{
    // Ensure a result list exists before anything is appended to it.
    if (comparisonResults == null)
        comparisonResults = new List<DicomAttributeComparisonResult>();

    DicomFile a = obj as DicomFile;
    if (a == null)
    {
        DicomAttributeComparisonResult result = new DicomAttributeComparisonResult();
        result.ResultType = ComparisonResultType.InvalidType;
        // Guard the GetType() call: obj may be null here ("null as DicomFile"
        // is null), and obj.GetType() would throw NullReferenceException.
        result.Details = String.Format("Comparison object is invalid type: {0}",
                                       obj == null ? "(null)" : obj.GetType().ToString());
        comparisonResults.Add(result);
        return false;
    }

    // Compare meta information first, then the data set; each appends its
    // own mismatch details to comparisonResults.
    if (!MetaInfo.Equals(a.MetaInfo, ref comparisonResults))
        return false;

    if (!DataSet.Equals(a.DataSet, ref comparisonResults))
        return false;

    return true;
}
/// <summary>
/// Compares a received duplicate with the existing copy in the filesystem and throws it
/// into the SIQ if they differ. Otherwise, simply deletes the duplicate and keeps
/// everything as is.
/// </summary>
/// <param name="dupFile">The duplicate DICOM file that was received.</param>
/// <param name="baseFile">The copy of the SOP already stored in the study folder.</param>
/// <param name="uid">The work queue uid entry describing the duplicate.</param>
/// <returns>A <see cref="ProcessDuplicateResult"/> describing the action taken.</returns>
private ProcessDuplicateResult CompareDuplicates(DicomFile dupFile, DicomFile baseFile, WorkQueueUid uid)
{
    var outcome = new ProcessDuplicateResult();
    string dupSopPath = ServerHelper.GetDuplicateUidPath(StorageLocation, uid);

    if (!baseFile.TransferSyntax.Equals(dupFile.TransferSyntax))
    {
        // The two copies use different transfer syntaxes. Where a file is
        // losslessly compressed and a codec is available, decompress it so the
        // data sets can still be compared attribute-by-attribute.
        bool canDecompressDup = dupFile.TransferSyntax.Encapsulated
                                && !dupFile.TransferSyntax.LossyCompressed
                                && DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null;
        if (canDecompressDup)
        {
            dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
        }

        bool canDecompressBase = baseFile.TransferSyntax.Encapsulated
                                 && !baseFile.TransferSyntax.LossyCompressed
                                 && DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null;
        if (canDecompressBase)
        {
            baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
        }

        // If either file is still encapsulated we cannot compare the pixel data,
        // so record the transfer syntax mismatch and send the duplicate to the SIQ.
        if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
        {
            var mismatch = new DicomAttributeComparisonResult
            {
                ResultType = ComparisonResultType.DifferentValues,
                TagName = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
                Details = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
                                        baseFile.TransferSyntax, dupFile.TransferSyntax)
            };
            CreateDuplicateSIQEntry(uid, dupFile, new List<DicomAttributeComparisonResult> { mismatch });
            outcome.ActionTaken = DuplicateProcessResultAction.Reconcile;
            return outcome;
        }
    }

    var differences = new List<DicomAttributeComparisonResult>();
    if (baseFile.DataSet.Equals(dupFile.DataSet, ref differences))
    {
        // Identical content: the duplicate carries no new information, discard it.
        Platform.Log(LogLevel.Info, "Duplicate SOP being processed is identical. Removing SOP: {0}",
                     baseFile.MediaStorageSopInstanceUid);
        RemoveWorkQueueUid(uid, dupSopPath);
        outcome.ActionTaken = DuplicateProcessResultAction.Delete;
    }
    else
    {
        // Content differs: hand the duplicate to the SIQ with the recorded differences.
        CreateDuplicateSIQEntry(uid, dupFile, differences);
        outcome.ActionTaken = DuplicateProcessResultAction.Reconcile;
    }

    return outcome;
}