        private DicomProcessingResult HandleDuplicateFile(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomMessageBase message, string sourceFilename, StudyProcessWorkQueueData data)
        {
            Study study = studyLocation.Study ??
                          studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);

            if (study != null)
            {
                Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (A#:{1} StudyUid:{2}  Patient: {3}  ID:{4})",
                             sopInstanceUid,
                             study.AccessionNumber, study.StudyInstanceUid,
                             study.PatientsName, study.PatientId);
            }
            else
            {
                Platform.Log(LogLevel.Info,
                             "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                             sopInstanceUid, studyLocation.StudyInstanceUid);
            }

            var sopProcessingContext = new SopInstanceProcessorContext(commandProcessor, studyLocation, _context.ContextID,
                                                                       _context.Request)
            {
                DuplicateProcessing = _context.DuplicateProcessing
            };
            DicomProcessingResult result = DuplicateSopProcessorHelper.Process(sopProcessingContext, message, data,
                                                                               sourceFilename);

            return result;
        }
        private DicomProcessingResult HandleDuplicate(string sopInstanceUid, StudyStorageLocation studyLocation, ServerCommandProcessor commandProcessor, DicomFile file)
        {
            Study study = studyLocation.Study ??
                          studyLocation.LoadStudy(ServerExecutionContext.Current.PersistenceContext);

            if (study != null)
            {
                Platform.Log(LogLevel.Info, "Received duplicate SOP {0} (A#:{1} StudyUid:{2}  Patient: {3}  ID:{4})",
                             sopInstanceUid,
                             study.AccessionNumber, study.StudyInstanceUid,
                             study.PatientsName, study.PatientId);
            }
            else
            {
                Platform.Log(LogLevel.Info,
                             "Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                             sopInstanceUid, studyLocation.StudyInstanceUid);
            }

            SopProcessingContext  sopProcessingContext = new SopProcessingContext(commandProcessor, studyLocation, _context.ContextID);
            DicomProcessingResult result = DuplicateSopProcessorHelper.Process(sopProcessingContext, file);

            return result;
        }
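
Both handlers above share one shape: lazily load the Study record only when the storage location has not already cached it, log the duplicate with whatever detail is available, and delegate the actual decision to DuplicateSopProcessorHelper.Process. The self-contained sketch below isolates that lazy-load-and-log step; StudyInfo and StorageLocationInfo are hypothetical stand-ins for the ClearCanvas Study and StudyStorageLocation types, not the real API.

// Minimal stand-ins modeling only the members the pattern needs.
using System;

public class StudyInfo
{
    public string AccessionNumber { get; set; }
    public string StudyInstanceUid { get; set; }
}

public class StorageLocationInfo
{
    public StudyInfo Study { get; set; }            // null until first load
    public string StudyInstanceUid { get; set; }

    // In the real code this is a database round-trip through the persistence context.
    public StudyInfo LoadStudy() { return null; }   // null here: study not yet processed
}

public static class DuplicateLogging
{
    public static void LogDuplicate(string sopInstanceUid, StorageLocationInfo location)
    {
        // Reuse the cached Study when present; otherwise load it once.
        StudyInfo study = location.Study ?? location.LoadStudy();

        if (study != null)
            Console.WriteLine("Received duplicate SOP {0} (A#:{1} StudyUid:{2})",
                              sopInstanceUid, study.AccessionNumber, study.StudyInstanceUid);
        else
            Console.WriteLine("Received duplicate SOP {0} (StudyUid:{1}). Existing files haven't been processed.",
                              sopInstanceUid, location.StudyInstanceUid);
    }
}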
Example #3
        private void ProcessUidList()
        {
            string lastErrorMessage = "";

            Platform.Log(LogLevel.Info, "Populating new images into the study folder... {0} to go", Context.WorkQueueUidList.Count);

            StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

            // Load the rules engine
            context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
            context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
            context.SopProcessedRulesEngine.Load();

            // Add the update commands to the context.
            context.UpdateCommands.AddRange(BuildUpdateCommandList());

            // Add command to update the Series & Sop Instances.
            context.UpdateCommands.Add(new SeriesSopUpdateCommand(Context.WorkQueueItemStudyStorage, _destinationStudyStorage, UidMapper));

            // Load the Study XML File
            StudyXml xml = LoadStudyXml(_destinationStudyStorage);

            PrintUpdateCommands(context.UpdateCommands);
            foreach (WorkQueueUid uid in Context.WorkQueueUidList)
            {
                // Load the file outside the try/catch block so it can be
                // referenced in the catch block below.
                string    imagePath = GetReconcileUidPath(uid);
                DicomFile file      = new DicomFile(imagePath);

                try
                {
                    file.Load();

                    string groupID = ServerHelper.GetUidGroup(file, Context.Partition, Context.WorkQueueItem.InsertTime);

                    SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context)
                    {
                        EnforceNameRules = true
                    };

                    ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid),
                                                                       SopInstanceProcessorSopType.NewSop);
                    if (result.Status != ProcessingStatus.Success)
                    {
                        throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
                    }

                    _processedCount++;

                    Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, _processedCount, Context.WorkQueueUidList.Count);
                }
                catch (Exception e)
                {
                    Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

                    if (e is InstanceAlreadyExistsException ||
                        (e.InnerException != null && e.InnerException is InstanceAlreadyExistsException))
                    {
                        // TODO (Rigel) - Check if we should include the WorkItemData to insert into the WorkQueue here.
                        DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid),
                                                                            Context.WorkQueueItem, uid, null);
                    }
                    else
                    {
                        lastErrorMessage = e.Message;
                        SopInstanceProcessor.FailUid(uid, true);
                    }
                    _failedCount++;
                }
            }

            if (_processedCount == 0)
            {
                throw new ApplicationException(lastErrorMessage);
            }
        }
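
The error handling in this loop has a deliberate contract: a duplicate instance is diverted into the Study Integrity Queue, any other failure marks the uid as failed and records the message, and the method throws only when not a single file was reconciled. The condensed sketch below isolates that contract; ProcessAll and DuplicateInstanceException are hypothetical stand-ins, with the real ClearCanvas calls noted in comments.

using System;
using System.Collections.Generic;

// Hypothetical stand-in for ClearCanvas's InstanceAlreadyExistsException.
public class DuplicateInstanceException : Exception { }

public static class ReconcileLoop
{
    public static void ProcessAll(IEnumerable<string> uids, Action<string> processOne)
    {
        int processed = 0;
        string lastErrorMessage = "";

        foreach (string uid in uids)
        {
            try
            {
                processOne(uid);
                processed++;
            }
            catch (Exception e)
            {
                if (e is DuplicateInstanceException ||
                    e.InnerException is DuplicateInstanceException)
                {
                    // Real code: create a duplicate SIQ entry via
                    // DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(...).
                }
                else
                {
                    // Real code: SopInstanceProcessor.FailUid(uid, true);
                    lastErrorMessage = e.Message;
                }
            }
        }

        // Matches the original contract: the work item fails only when
        // not a single uid was processed successfully.
        if (processed == 0)
            throw new ApplicationException(lastErrorMessage);
    }
}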
        private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
        {
            var result = new ProcessDuplicateResult();

            string duplicateSopPath = ServerPlatform.GetDuplicateUidPath(StorageLocation, uid);
            string basePath         = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

            if (!File.Exists(basePath))
            {
                // NOTE: This is a special case. The file which caused the DICOM service to flag this SOP as a duplicate
                // no longer exists in the study folder. Perhaps it has been moved to another folder during auto reconciliation.
                // We have nothing to compare against, so let's just throw it into the SIQ.
                CreateDuplicateSIQEntry(uid, dupFile, null);
                result.ActionTaken = DuplicateProcessResultAction.Reconcile;
            }
            else
            {
                // Check if system is configured to override the rule for this study
                if (DuplicatePolicy.IsParitionDuplicatePolicyOverridden(this.StorageLocation))
                {
                    return OverwriteDuplicate(dupFile, uid, studyXml);
                }
                else
                {
                    var baseFile = new DicomFile(basePath);
                    baseFile.Load();

                    if (DuplicateSopProcessorHelper.SopClassIsReport(dupFile.SopClass.Uid) && ServerPartition.AcceptLatestReport)
                    {
                        return ProcessDuplicateReport(dupFile, baseFile, uid, studyXml);
                    }

                    if (!dupFile.TransferSyntax.Equals(baseFile.TransferSyntax))
                    {
                        // If they're compressed, and we have a codec, let's decompress and still do the comparison
                        if (dupFile.TransferSyntax.Encapsulated &&
                            !dupFile.TransferSyntax.LossyCompressed &&
                            DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null)
                        {
                            dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
                        }

                        if (baseFile.TransferSyntax.Encapsulated &&
                            !baseFile.TransferSyntax.LossyCompressed &&
                            DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null)
                        {
                            baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
                        }

                        if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
                        {
                            string failure = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
                                                           baseFile.TransferSyntax, dupFile.TransferSyntax);

                            var list          = new List<DicomAttributeComparisonResult>();
                            var compareResult = new DicomAttributeComparisonResult
                            {
                                ResultType = ComparisonResultType.DifferentValues,
                                TagName    = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
                                Details    = failure
                            };
                            list.Add(compareResult);
                            CreateDuplicateSIQEntry(uid, dupFile, list);
                            result.ActionTaken = DuplicateProcessResultAction.Reconcile;
                            return result;
                        }
                    }

                    var failureReason = new List<DicomAttributeComparisonResult>();
                    if (baseFile.DataSet.Equals(dupFile.DataSet, ref failureReason))
                    {
                        Platform.Log(LogLevel.Info,
                                     "Duplicate SOP being processed is identical.  Removing SOP: {0}",
                                     baseFile.MediaStorageSopInstanceUid);

                        RemoveWorkQueueUid(uid, duplicateSopPath);
                        result.ActionTaken = DuplicateProcessResultAction.Delete;
                    }
                    else
                    {
                        CreateDuplicateSIQEntry(uid, dupFile, failureReason);
                        result.ActionTaken = DuplicateProcessResultAction.Reconcile;
                    }
                }
            }

            return result;
        }
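
Before comparing datasets, the method tries to bring both files to an uncompressed transfer syntax, but only when the compression is lossless and a codec is registered; if either file is still encapsulated afterwards, the comparison is abandoned and the duplicate is sent to reconciliation. Below is a hedged sketch of that gate; the helper and its name are invented for illustration, while the real calls are DicomCodecRegistry.GetCodec and ChangeTransferSyntax as shown above.

// Hypothetical helper mirroring the normalization gate in ProcessDuplicate.
public static class TransferSyntaxGate
{
    public static bool CanDecompressForComparison(bool encapsulated,
                                                  bool lossyCompressed,
                                                  bool codecAvailable)
    {
        // Lossy data is never decompressed for the comparison: the decoded
        // pixels would not be byte-identical to the stored file, so a
        // dataset equality check would be meaningless.
        return encapsulated && !lossyCompressed && codecAvailable;
    }
}

In the method above, a file that fails this gate for either side routes the duplicate into a SIQ entry with a TransferSyntaxUid mismatch recorded as the comparison failure.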
Example #5
        private void ProcessUidList()
        {
            int counter = 0;

            Platform.Log(LogLevel.Info, "Populating new images into the study folder... {0} to go", Context.WorkQueueUidList.Count);

            StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

            // Load the rules engine
            context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
            context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
            context.SopProcessedRulesEngine.Load();

            // Load the Study XML File
            StudyXml xml = LoadStudyXml(_destinationStudyStorage);

            string lastErrorMessage = "";

            foreach (WorkQueueUid uid in Context.WorkQueueUidList)
            {
                string    imagePath = GetReconcileUidPath(uid);
                DicomFile file      = new DicomFile(imagePath);

                try
                {
                    file.Load();

                    string groupID = ServerHelper.GetUidGroup(file, _destinationStudyStorage.ServerPartition, Context.WorkQueueItem.InsertTime);

                    SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context);
                    ProcessingResult     result       = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid), SopInstanceProcessorSopType.NewSop);
                    if (result.Status != ProcessingStatus.Success)
                    {
                        throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
                    }

                    counter++;

                    Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]",
                                 uid.SopInstanceUid, counter, Context.WorkQueueUidList.Count);
                }
                catch (Exception e)
                {
                    Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

                    if (e is InstanceAlreadyExistsException ||
                        (e.InnerException != null && e.InnerException is InstanceAlreadyExistsException))
                    {
                        // TODO (Rigel) - Check if we should include the WorkQueueData field here
                        DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid),
                                                                            Context.WorkQueueItem, uid, null);
                    }
                    else
                    {
                        lastErrorMessage = e.Message;
                        SopInstanceProcessor.FailUid(uid, true);
                    }
                }
            }

            if (counter == 0)
            {
                throw new ApplicationException(lastErrorMessage);
            }
        }
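
This earlier variant differs from the ProcessUidList above in small ways (no EnforceNameRules on the SopInstanceProcessor, and only successes are counted), but both begin by building the SopProcessed rules engine with compression rules omitted. A condensed sketch of that shared setup follows, using only the calls visible in the examples; the ServerEntityKey parameter type is my assumption for WorkQueueItem.ServerPartitionKey, and the ClearCanvas ImageServer assemblies defining these types are assumed to be referenced.

// Condensed from the setup shared by both ProcessUidList variants above.
static ServerRulesEngine LoadSopProcessedEngine(ServerEntityKey partitionKey)
{
    var engine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, partitionKey);
    engine.AddOmittedType(ServerRuleTypeEnum.SopCompress); // keep compression rules out of this pass
    engine.Load();
    return engine;
}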