/// <summary>
        /// Process a specified <see cref="WorkQueueUid"/>
        /// </summary>
        /// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
        /// <param name="sop">The <see cref="WorkQueueUid"/> being processed</param>
        /// <param name="studyXml">The <see cref="StudyXml"/> object for the study being processed</param>
        /// <returns>true if the <see cref="WorkQueueUid"/> was processed successfully; false otherwise</returns>
        protected virtual bool ProcessWorkQueueUid(Model.WorkQueue item, WorkQueueUid sop, StudyXml studyXml)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(sop, "sop");
            Platform.CheckForNullReference(studyXml, "studyXml");

            OnProcessUidBegin(item, sop);

            string path = null;

            try
            {
                if (sop.Duplicate && sop.Extension != null)
                {
                    path = ServerHelper.GetDuplicateUidPath(StorageLocation, sop);
                    var file = new DicomFile(path);
                    file.Load();

                    InstancePreProcessingResult result = PreProcessFile(sop, file);

                    if (false == file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid) ||
                        result.DiscardImage)
                    {
                        RemoveWorkQueueUid(sop, null);
                    }
                    else
                    {
                        var duplicateResult = ProcessDuplicate(file, sop, studyXml);
                        if (duplicateResult.ActionTaken == DuplicateProcessResultAction.Delete || duplicateResult.ActionTaken == DuplicateProcessResultAction.Accept)
                        {
                            // make sure the folder is also deleted if it's empty
                            string folder = Path.GetDirectoryName(path);

                            String reconcileRootFolder = ServerHelper.GetDuplicateFolderRootPath(StorageLocation);
                            DirectoryUtility.DeleteIfEmpty(folder, reconcileRootFolder);
                        }
                    }
                }
                else
                {
                    try
                    {
                        path = StorageLocation.GetSopInstancePath(sop.SeriesInstanceUid, sop.SopInstanceUid);
                        var file = new DicomFile(path);
                        file.Load();

                        InstancePreProcessingResult result = PreProcessFile(sop, file);

                        if (false == file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid) ||
                            result.DiscardImage)
                        {
                            RemoveWorkQueueUid(sop, path);
                        }
                        else
                        {
                            ProcessFile(sop, file, studyXml, !result.AutoReconciled);
                        }
                    }
                    catch (DicomException ex)
                    {
                        // bad file. Remove it from the filesystem and the queue
                        RemoveBadDicomFile(path, ex.Message);
                        DeleteWorkQueueUid(sop);
                        return(false);
                    }
                }

                return(true);
            }
            catch (StudyIsNearlineException)
            {
                // handled by caller
                throw;
            }
            catch (Exception e)
            {
                Platform.Log(LogLevel.Error, e, "Unexpected exception when processing file: {0} SOP Instance: {1}", path, sop.SopInstanceUid);
                item.FailureDescription = e.InnerException != null
                                          ? String.Format("{0}:{1}", e.GetType().Name, e.InnerException.Message)
                                          : String.Format("{0}:{1}", e.GetType().Name, e.Message);

                //No longer needed.  Update was moved into the SopInstanceProcessor
                //sop.FailureCount++;
                //UpdateWorkQueueUid(sop);
                return(false);
            }
            finally
            {
                OnProcessUidEnd(item, sop);
            }
        }
Example #2
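        /// <summary>
        /// Processes a single <see cref="WorkQueueUid"/> for compression: deletes the queue entry when the
        /// SOP is no longer part of the study XML, otherwise routes the file to <see cref="ProcessFile"/>
        /// using the supplied codec factory.
        /// </summary>
        /// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
        /// <param name="sop">The <see cref="WorkQueueUid"/> being processed</param>
        /// <param name="studyXml">The <see cref="StudyXml"/> object for the study being processed</param>
        /// <param name="theCodecFactory">The codec factory used to compress the file</param>
        /// <returns>true if the <see cref="WorkQueueUid"/> was processed successfully; false otherwise</returns>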
        private bool ProcessWorkQueueUid(Model.WorkQueue item, WorkQueueUid sop, StudyXml studyXml, IDicomCodecFactory theCodecFactory)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(sop, "sop");
            Platform.CheckForNullReference(studyXml, "studyXml");

            if (!studyXml.Contains(sop.SeriesInstanceUid, sop.SopInstanceUid))
            {
                // Uid was inserted but not in the study xml.
                // Auto-recovery might have detected a problem with that file and removed it from the study.
                // Assume the study xml has been corrected and ignore the uid.
                Platform.Log(LogLevel.Warn, "Skipping SOP {0} in series {1}. It is no longer part of the study.", sop.SopInstanceUid, sop.SeriesInstanceUid);

                // Delete it out of the queue
                DeleteWorkQueueUid(sop);
                return(true);
            }

            string basePath = Path.Combine(StorageLocation.GetStudyPath(), sop.SeriesInstanceUid);

            basePath = Path.Combine(basePath, sop.SopInstanceUid);
            string path;

            if (sop.Extension != null)
            {
                path = basePath + "." + sop.Extension;
            }
            else
            {
                path = basePath + ServerPlatform.DicomFileExtension;
            }

            try
            {
                ProcessFile(item, sop, path, studyXml, theCodecFactory);

                // WorkQueueUid has been deleted by the processor

                return(true);
            }
            catch (Exception e)
            {
                if (e.InnerException != null && e.InnerException is DicomCodecUnsupportedSopException)
                {
                    Platform.Log(LogLevel.Warn, e, "Instance not supported for compressor: {0}.  Deleting WorkQueue entry for SOP {1}", e.Message, sop.SopInstanceUid);

                    item.FailureDescription = e.InnerException != null ? e.InnerException.Message : e.Message;

                    // Delete it out of the queue
                    DeleteWorkQueueUid(sop);

                    return(false);
                }
                Platform.Log(LogLevel.Error, e, "Unexpected exception when compressing file: {0} SOP Instance: {1}", path, sop.SopInstanceUid);
                item.FailureDescription = e.InnerException != null ? e.InnerException.Message : e.Message;

                sop.FailureCount++;

                UpdateWorkQueueUid(sop);

                return(false);
            }
        }
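        /// <summary>
        /// Entry point for processing a <see cref="WorkQueue"/> item: initializes the processor, loads a
        /// writable storage location, postpones the item if the study is scheduled for reprocessing, and
        /// otherwise invokes <see cref="ProcessItem"/> inside a <see cref="WorkQueueProcessorContext"/>.
        /// </summary>
        /// <param name="item">The <see cref="WorkQueue"/> item to process</param>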
        public void Process(Model.WorkQueue item)
        {
            _workQueueItem = item;

            using (new WorkQueueProcessorContext(item))
            {
                string failureDescription;
                if (!Initialize(item, out failureDescription))
                {
                    PostponeItem(failureDescription);
                    return;
                }

                if (!LoadWritableStorageLocation(item))
                {
                    PostponeItem("Unable to find writeable StorageLocation.");
                    return;
                }

                if (StorageLocation.QueueStudyStateEnum == QueueStudyStateEnum.ReprocessScheduled && !item.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.ReprocessStudy))
                {
                    // TODO: Should we check if the state is correct (i.e., there's actually a ReprocessStudy work queue entry)?
                    PostponeItem("Study is scheduled for reprocess");
                    return;
                }

                if (CanStart())
                {
                    try
                    {
                        OnProcessItemBegin(item);
                        ProcessTime.Start();
                        ProcessItem(item);
                        ProcessTime.End();
                    }
                    catch(StudyIntegrityValidationFailure ex)
                    {
                        item.FailureDescription = ex.Message;
                        OnStudyIntegrityFailure(WorkQueueItem, ex.Message);
                    }
                    finally
                    {
                        OnProcessItemEnd(item);
                    }
                }
            }
        }
Example #4
 void ResetWorkQueueDialog_WorkQueueItemReseted(Model.WorkQueue item)
 {
     SearchPanel.Refresh();
 }
Example #5
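        /// <summary>
        /// Compresses a single DICOM file with the supplied codec factory inside a
        /// <see cref="ServerCommandProcessor"/> for rollback support, updating the study XML and deleting
        /// the <see cref="WorkQueueUid"/> entry on success. Files already in the target transfer syntax are
        /// skipped and their queue entries removed.
        /// </summary>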
        protected void ProcessFile(Model.WorkQueue item, WorkQueueUid sop, string path, StudyXml studyXml, IDicomCodecFactory theCodecFactory)
        {
            DicomFile file = null;

            _instanceStats = new CompressInstanceStatistics();

            _instanceStats.ProcessTime.Start();

            // Use the command processor for rollback capabilities.
            using (ServerCommandProcessor processor = new ServerCommandProcessor("Processing WorkQueue Compress DICOM File"))
            {
                string modality = String.Empty;

                try
                {
                    file = new DicomFile(path);

                    _instanceStats.FileLoadTime.Start();
                    file.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);
                    _instanceStats.FileLoadTime.End();

                    modality = file.DataSet[DicomTags.Modality].GetString(0, String.Empty);

                    FileInfo fileInfo = new FileInfo(path);
                    _instanceStats.FileSize = (ulong)fileInfo.Length;

                    // Get the Patients Name for processing purposes.
                    String patientsName = file.DataSet[DicomTags.PatientsName].GetString(0, "");

                    if (file.TransferSyntax.Equals(theCodecFactory.CodecTransferSyntax))
                    {
                        // Delete the WorkQueueUid item
                        processor.AddCommand(new DeleteWorkQueueUidCommand(sop));

                        // Do the actual processing
                        if (!processor.Execute())
                        {
                            Platform.Log(LogLevel.Warn, "Failure deleteing WorkQueueUid: {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid);
                            Platform.Log(LogLevel.Warn, "Compression file that failed: {0}", file.Filename);
                        }
                        else
                        {
                            Platform.Log(LogLevel.Warn, "Skip compressing SOP {0}. Its current transfer syntax is {1}",
                                         file.MediaStorageSopInstanceUid, file.TransferSyntax.Name);
                        }
                    }
                    else
                    {
                        IDicomCodec codec = theCodecFactory.GetDicomCodec();

                        // Create a context for applying actions from the rules engine
                        var context = new ServerActionContext(file, StorageLocation.FilesystemKey, ServerPartition, item.StudyStorageKey);
                        context.CommandProcessor = processor;

                        var parms           = theCodecFactory.GetCodecParameters(item.Data);
                        var compressCommand =
                            new DicomCompressCommand(context.Message, theCodecFactory.CodecTransferSyntax, codec, parms);
                        processor.AddCommand(compressCommand);

                        var save = new SaveDicomFileCommand(file.Filename, file, false);
                        processor.AddCommand(save);

                        // Update the StudyStream object, must be done after compression
                        // and after the compressed image has been successfully saved
                        var insertStudyXmlCommand = new UpdateStudyXmlCommand(file, studyXml, StorageLocation);
                        processor.AddCommand(insertStudyXmlCommand);

                        // Delete the WorkQueueUid item
                        processor.AddCommand(new DeleteWorkQueueUidCommand(sop));

                        // Do the actual processing
                        if (!processor.Execute())
                        {
                            EventManager.FireEvent(this, new FailedUpdateSopEventArgs {
                                File = file, ServerPartitionEntry = context.ServerPartition, WorkQueueUidEntry = sop, WorkQueueEntry = WorkQueueItem, FileLength = (ulong)insertStudyXmlCommand.FileSize, FailureMessage = processor.FailureReason
                            });

                            _instanceStats.CompressTime.Add(compressCommand.CompressTime);
                            Platform.Log(LogLevel.Error, "Failure compressing command {0} for SOP: {1}", processor.Description, file.MediaStorageSopInstanceUid);
                            Platform.Log(LogLevel.Error, "Compression file that failed: {0}", file.Filename);
                            throw new ApplicationException("Unexpected failure (" + processor.FailureReason + ") executing command for SOP: " + file.MediaStorageSopInstanceUid, processor.FailureException);
                        }
                        _instanceStats.CompressTime.Add(compressCommand.CompressTime);
                        Platform.Log(ServerPlatform.InstanceLogLevel, "Compress SOP: {0} for Patient {1}", file.MediaStorageSopInstanceUid,
                                     patientsName);

                        EventManager.FireEvent(this, new UpdateSopEventArgs {
                            File = file, ServerPartitionEntry = context.ServerPartition, WorkQueueUidEntry = sop, WorkQueueEntry = WorkQueueItem, FileLength = (ulong)insertStudyXmlCommand.FileSize
                        });
                    }
                }
                catch (Exception e)
                {
                    EventManager.FireEvent(this, new FailedUpdateSopEventArgs {
                        File = file, ServerPartitionEntry = ServerPartition, WorkQueueUidEntry = sop, WorkQueueEntry = WorkQueueItem, FileLength = (ulong)new FileInfo(path).Length, FailureMessage = processor.FailureReason
                    });

                    Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}.  Rolling back operation.",
                                 processor.Description);
                    processor.Rollback();

                    throw;
                }
                finally
                {
                    _instanceStats.ProcessTime.End();
                    _studyStats.AddSubStats(_instanceStats);

                    _studyStats.StudyInstanceUid = StorageLocation.StudyInstanceUid;
                    if (String.IsNullOrEmpty(modality) == false)
                    {
                        _studyStats.Modality = modality;
                    }

                    // Update the statistics
                    _studyStats.NumInstances++;
                }
            }
        }
Example #6
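        /// <summary>
        /// Builds a <see cref="WorkQueueDetails"/> object for a ProcessDuplicate <see cref="WorkQueue"/> item,
        /// including the duplicate SOP folder, pending instance/series counts, and study details when available.
        /// </summary>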
        private static WorkQueueDetails CreateProcessDuplicateWorkQueueItemDetails(Model.WorkQueue item)
        {
            var detail = new WorkQueueDetails();

            detail.Key = item.Key;
            detail.ScheduledDateTime = item.ScheduledTime;
            detail.ExpirationTime    = item.ExpirationTime;
            detail.InsertTime        = item.InsertTime;
            detail.FailureCount      = item.FailureCount;
            detail.Type               = item.WorkQueueTypeEnum;
            detail.Status             = item.WorkQueueStatusEnum;
            detail.Priority           = item.WorkQueuePriorityEnum;
            detail.FailureDescription = item.FailureDescription;
            detail.ServerDescription  = item.ProcessorID;

            StudyStorageLocation storage = WorkQueueController.GetLoadStorageLocation(item);

            detail.StorageLocationPath = storage.GetStudyPath();

            XmlDocument doc      = item.Data;
            XmlNodeList nodeList = doc.GetElementsByTagName("DuplicateSopFolder");

            detail.DuplicateStorageLocationPath = nodeList[0].InnerText;

            // Fetch UIDs
            var wqUidsAdaptor = new WorkQueueUidAdaptor();
            var uidCriteria   = new WorkQueueUidSelectCriteria();

            uidCriteria.WorkQueueKey.EqualTo(item.GetKey());
            IList <WorkQueueUid> uids = wqUidsAdaptor.Get(uidCriteria);

            var mapSeries = new Hashtable();

            foreach (WorkQueueUid uid in uids)
            {
                if (mapSeries.ContainsKey(uid.SeriesInstanceUid) == false)
                {
                    mapSeries.Add(uid.SeriesInstanceUid, uid.SopInstanceUid);
                }
            }

            detail.NumInstancesPending = uids.Count;
            detail.NumSeriesPending    = mapSeries.Count;


            // Fetch the study and patient info
            var          ssAdaptor = new StudyStorageAdaptor();
            StudyStorage storages  = ssAdaptor.Get(item.StudyStorageKey);

            var studyAdaptor  = new StudyAdaptor();
            var studycriteria = new StudySelectCriteria();

            studycriteria.StudyInstanceUid.EqualTo(storages.StudyInstanceUid);
            studycriteria.ServerPartitionKey.EqualTo(item.ServerPartitionKey);
            Study study = studyAdaptor.GetFirst(studycriteria);

            // Study may not be available until the images are processed.
            if (study != null)
            {
                var studyAssembler = new StudyDetailsAssembler();
                detail.Study = studyAssembler.CreateStudyDetail(study);
            }
            return(detail);
        }
Example #7
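        /// <summary>
        /// Builds a <see cref="WorkQueueDetails"/> object for an edit <see cref="WorkQueue"/> item, including
        /// pending instance/series counts, study details when available, and the requested update entries.
        /// </summary>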
        private static WorkQueueDetails CreateEditWorkQueueItemDetails(Model.WorkQueue item)
        {
            string studyPath;

            try
            {
                StudyStorageLocation storage = WorkQueueController.GetLoadStorageLocation(item);
                studyPath = storage.GetStudyPath();
            }
            catch (Exception)
            {
                studyPath = string.Empty;
            }
            var detail = new WorkQueueDetails
            {
                Key = item.Key,
                ScheduledDateTime = item.ScheduledTime,
                ExpirationTime    = item.ExpirationTime,
                InsertTime        = item.InsertTime,
                FailureCount      = item.FailureCount,
                Type                = item.WorkQueueTypeEnum,
                Status              = item.WorkQueueStatusEnum,
                Priority            = item.WorkQueuePriorityEnum,
                FailureDescription  = item.FailureDescription,
                ServerDescription   = item.ProcessorID,
                StorageLocationPath = studyPath
            };



            // Fetch UIDs
            var wqUidsAdaptor = new WorkQueueUidAdaptor();
            var uidCriteria   = new WorkQueueUidSelectCriteria();

            uidCriteria.WorkQueueKey.EqualTo(item.GetKey());
            IList <WorkQueueUid> uids = wqUidsAdaptor.Get(uidCriteria);

            var mapSeries = new Hashtable();

            foreach (WorkQueueUid uid in uids)
            {
                if (mapSeries.ContainsKey(uid.SeriesInstanceUid) == false)
                {
                    mapSeries.Add(uid.SeriesInstanceUid, uid.SopInstanceUid);
                }
            }

            detail.NumInstancesPending = uids.Count;
            detail.NumSeriesPending    = mapSeries.Count;


            // Fetch the study and patient info
            var          ssAdaptor = new StudyStorageAdaptor();
            StudyStorage storages  = ssAdaptor.Get(item.StudyStorageKey);

            var studyAdaptor  = new StudyAdaptor();
            var studycriteria = new StudySelectCriteria();

            studycriteria.StudyInstanceUid.EqualTo(storages.StudyInstanceUid);
            studycriteria.ServerPartitionKey.EqualTo(item.ServerPartitionKey);
            Study study = studyAdaptor.GetFirst(studycriteria);

            // Study may not be available until the images are processed.
            if (study != null)
            {
                var studyAssembler = new StudyDetailsAssembler();
                detail.Study = studyAssembler.CreateStudyDetail(study);
            }

            var parser = new EditStudyWorkQueueDataParser();
            EditStudyWorkQueueData data = parser.Parse(item.Data.DocumentElement);

            detail.EditUpdateItems = data.EditRequest.UpdateEntries.ToArray();

            return(detail);
        }
Example #8
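 /// <summary>
 /// Creates an execution context scoped to the specified <see cref="WorkQueue"/> item, keyed by the item's key.
 /// </summary>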
 public WorkQueueProcessorContext(Model.WorkQueue item)
     : base(item.GetKey().Key.ToString())
 {
     _item = item;
 }
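 /// <summary>
 /// Bundles the processor, the <see cref="WorkQueue"/> item, and the <see cref="WorkQueueThreadDelegate"/>
 /// passed to a work queue thread.
 /// </summary>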
 public WorkQueueThreadParameter(IWorkQueueItemProcessor processor, Model.WorkQueue item, WorkQueueThreadDelegate del)
 {
     _item      = item;
     _processor = processor;
     _del       = del;
 }
Example #10
 private void ResetWorkQueueDialog_WorkQueueItemReseted(Model.WorkQueue item)
 {
     DataBind();
     WorkQueueItemDetailsPanel.Refresh();
 }
Example #11
 private void WorkQueueItemDetailsPanel_RescheduleButtonClick(object sender, WorkQueueDetailsButtonEventArg e)
 {
     Model.WorkQueue item = e.WorkQueueItem;
     RescheduleWorkQueueItem(item);
 }
Example #12
 private void DeleteWorkQueueDialog_WorkQueueItemDeleted(Model.WorkQueue item)
 {
     DataBind();
     Response.Redirect(ImageServerConstants.PageURLs.WorkQueueItemDeletedPage);
 }
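        /// <summary>
        /// Processes a StudyProcess <see cref="WorkQueue"/> item: loads the pending <see cref="WorkQueueUid"/>
        /// list, loads the SopProcessed rules engine, processes each instance, and completes, idles, or fails
        /// the entry based on the outcome.
        /// </summary>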
        protected override void ProcessItem(Model.WorkQueue item)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(StorageLocation, "StorageLocation");

            // Verify the study is not lossy online and lossless in the archive.
            // This could happen if the images were received WHILE the study was being lossy compressed.
            // The study state would not be set until the compression was completed or partially completed.
            CheckIfStudyIsLossy();


            Statistics.TotalProcessTime.Start();
            bool successful;
            bool idle = false;

            //Load the specific UIDs that need to be processed.
            LoadUids(item);

            int totalUidCount = WorkQueueUidList.Count;

            if (totalUidCount == 0)
            {
                successful = true;
                idle       = true;
            }
            else
            {
                try
                {
                    Context = new StudyProcessorContext(StorageLocation, WorkQueueItem);

                    // Load the rules engine
                    _sopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, item.ServerPartitionKey);
                    _sopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
                    _sopProcessedRulesEngine.Load();
                    Statistics.SopProcessedEngineLoadTime.Add(_sopProcessedRulesEngine.Statistics.LoadTime);
                    Context.SopProcessedRulesEngine = _sopProcessedRulesEngine;

                    if (Study != null)
                    {
                        Platform.Log(LogLevel.Info, "Processing study {0} for Patient {1} (PatientId:{2} A#:{3}), {4} objects",
                                     Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                                     Study.AccessionNumber, WorkQueueUidList.Count);
                    }
                    else
                    {
                        Platform.Log(LogLevel.Info, "Processing study {0}, {1} objects",
                                     StorageLocation.StudyInstanceUid, WorkQueueUidList.Count);
                    }

                    // Process the images in the list
                    successful = ProcessUidList(item) > 0;
                }
                catch (StudyIsNearlineException ex)
                {
                    // delay until the target is restored
                    // NOTE: If the study could not be restored after a certain period of time, this entry will be failed.
                    if (ex.RestoreRequested)
                    {
                        PostponeItem(string.Format("Unable to auto-reconcile at this time: the target study {0} is not online yet. Restore has been requested.", ex.StudyInstanceUid));
                        return;
                    }
                    // fail right away
                    FailQueueItem(item, string.Format("Unable to auto-reconcile at this time: the target study {0} is not nearline and could not be restored.", ex.StudyInstanceUid));
                    return;
                }
            }
            Statistics.TotalProcessTime.End();

            if (successful)
            {
                if (idle && item.ExpirationTime <= Platform.Time)
                {
                    // Run Study / Series Rules Engine.
                    var engine = new StudyRulesEngine(StorageLocation, ServerPartition);
                    engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);

                    // Log the FilesystemQueue related entries
                    StorageLocation.LogFilesystemQueue();

                    // Delete the queue entry.
                    PostProcessing(item,
                                   WorkQueueProcessorStatus.Complete,
                                   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
                else if (idle)
                {
                    PostProcessing(item,
                                   WorkQueueProcessorStatus.IdleNoDelete,                                 // Don't delete, so we ensure the rules engine is run later.
                                   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
                else
                {
                    PostProcessing(item,
                                   WorkQueueProcessorStatus.Pending,
                                   WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
            }
            else
            {
                bool allFailedDuplicate = CollectionUtils.TrueForAll(WorkQueueUidList, uid => uid.Duplicate && uid.Failed);

                if (allFailedDuplicate)
                {
                    Platform.Log(LogLevel.Error, "All entries are duplicates");

                    PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
                    return;
                }
                PostProcessingFailure(item, WorkQueueProcessorFailureType.NonFatal);
            }
        }
 /// <summary>
 /// Called before the specified <see cref="WorkQueueUid"/> is processed
 /// </summary>
 /// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
 /// <param name="uid">The <see cref="WorkQueueUid"/> being processed</param>
 protected virtual void OnProcessUidBegin(Model.WorkQueue item, WorkQueueUid uid)
 {
     Platform.CheckForNullReference(item, "item");
     Platform.CheckForNullReference(uid, "uid");
 }
        /// <summary>
        /// Method for getting next <see cref="WorkQueue"/> entry.
        /// </summary>
        /// <param name="processorId">The Id of the processor.</param>
        /// <remarks>
        /// </remarks>
        /// <returns>
        /// A <see cref="WorkQueue"/> entry if found; otherwise null.
        /// </returns>
        public Model.WorkQueue GetWorkQueueItem(string processorId)
        {
            Model.WorkQueue queueListItem = null;

            // First check for Stat WorkQueue items.
            if (_threadPool.MemoryLimitedThreadsAvailable)
            {
                using (
                    IUpdateContext updateContext =
                        PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                {
                    IQueryWorkQueue          select = updateContext.GetBroker <IQueryWorkQueue>();
                    WorkQueueQueryParameters parms  = new WorkQueueQueryParameters
                    {
                        ProcessorID           = processorId,
                        WorkQueuePriorityEnum = WorkQueuePriorityEnum.Stat
                    };

                    queueListItem = select.FindOne(parms);
                    if (queueListItem != null)
                    {
                        updateContext.Commit();
                    }
                }
            }

            // If high priority threads are still available,
            // see if there is a high priority item to pick up next.
            if (queueListItem == null &&
                _threadPool.HighPriorityThreadsAvailable)
            {
                using (
                    IUpdateContext updateContext =
                        PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                {
                    IQueryWorkQueue          select = updateContext.GetBroker <IQueryWorkQueue>();
                    WorkQueueQueryParameters parms  = new WorkQueueQueryParameters
                    {
                        ProcessorID           = processorId,
                        WorkQueuePriorityEnum = WorkQueuePriorityEnum.High
                    };

                    queueListItem = select.FindOne(parms);
                    if (queueListItem != null)
                    {
                        updateContext.Commit();
                    }
                }
            }

            // If we didn't find a high priority work queue item, and we have threads
            // available for memory limited work queue items, query for the next queue item available.
            if (queueListItem == null &&
                _threadPool.MemoryLimitedThreadsAvailable)
            {
                using (IUpdateContext updateContext =
                           PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                {
                    IQueryWorkQueue          select = updateContext.GetBroker <IQueryWorkQueue>();
                    WorkQueueQueryParameters parms  = new WorkQueueQueryParameters
                    {
                        ProcessorID = processorId
                    };

                    queueListItem = select.FindOne(parms);
                    if (queueListItem != null)
                    {
                        updateContext.Commit();
                    }
                }
            }

            // This logic is only reached if the memory-limited and priority threads are used up
            if (queueListItem == null &&
                !_threadPool.MemoryLimitedThreadsAvailable)
            {
                using (IUpdateContext updateContext =
                           PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                {
                    IQueryWorkQueue          select = updateContext.GetBroker <IQueryWorkQueue>();
                    WorkQueueQueryParameters parms  = new WorkQueueQueryParameters
                    {
                        ProcessorID           = processorId,
                        WorkQueuePriorityEnum = WorkQueuePriorityEnum.Stat,
                        MemoryLimited         = true
                    };

                    queueListItem = select.FindOne(parms);
                    if (queueListItem != null)
                    {
                        updateContext.Commit();
                    }
                }
            }

            // This logic is only reached if the memory-limited and priority threads are used up
            if (queueListItem == null &&
                !_threadPool.MemoryLimitedThreadsAvailable)
            {
                using (IUpdateContext updateContext =
                           PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                {
                    IQueryWorkQueue          select = updateContext.GetBroker <IQueryWorkQueue>();
                    WorkQueueQueryParameters parms  = new WorkQueueQueryParameters
                    {
                        ProcessorID   = processorId,
                        MemoryLimited = true
                    };

                    queueListItem = select.FindOne(parms);
                    if (queueListItem != null)
                    {
                        updateContext.Commit();
                    }
                }
            }

            return(queueListItem);
        }
Example #16
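        /// <summary>
        /// Processes a ReprocessStudy <see cref="WorkQueue"/> item: walks the study folder reprocessing up to
        /// 500 DICOM files per pass, rebuilds the study XML, and tracks its state in the WorkQueue Data column
        /// so the entry can resume, complete, or fail across runs.
        /// </summary>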
        protected override void ProcessItem(Model.WorkQueue item)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(item.StudyStorageKey, "item.StudyStorageKey");

            var context = new StudyProcessorContext(StorageLocation);

            // TODO: Should we enforce the patient's name rule?
            // If we do, the Study record will have the new patient's name
            // but how should we handle the name in the Patient record?
            bool enforceNameRules = false;
            var  processor        = new SopInstanceProcessor(context)
            {
                EnforceNameRules = enforceNameRules
            };

            var seriesMap = new Dictionary <string, List <string> >();

            bool   successful         = true;
            string failureDescription = null;

            // The processor stores its state in the Data column
            ReadQueueData(item);


            if (_queueData.State == null || !_queueData.State.ExecuteAtLeastOnce)
            {
                // Added for ticket #9673:
                // If the study folder does not exist and the study has been archived, trigger a restore and we're done
                if (!Directory.Exists(StorageLocation.GetStudyPath()))
                {
                    if (StorageLocation.ArchiveLocations.Count > 0)
                    {
                        Platform.Log(LogLevel.Info,
                                     "Reprocessing archived study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} without study data on the filesystem.  Inserting Restore Request.",
                                     Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                                     Study.AccessionNumber, ServerPartition.Description);

                        PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                        // Post-processing must be done first so the study is unlocked and the RestoreRequest can be inserted.
                        ServerHelper.InsertRestoreRequest(StorageLocation);

                        RaiseAlert(WorkQueueItem, AlertLevel.Warning,
                                   string.Format(
                                       "Found study {0} for Patient {1} (A#:{2})on Partition {3} without storage folder, restoring study.",
                                       Study.StudyInstanceUid, Study.PatientsName, Study.AccessionNumber, ServerPartition.Description));
                        return;
                    }
                }

                if (Study == null)
                {
                    Platform.Log(LogLevel.Info,
                                 "Reprocessing study {0} on Partition {1}", StorageLocation.StudyInstanceUid,
                                 ServerPartition.Description);
                }
                else
                {
                    Platform.Log(LogLevel.Info,
                                 "Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                                 Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                                 Study.AccessionNumber, ServerPartition.Description);
                }

                CleanupDatabase();
            }
            else
            {
                if (_queueData.State.Completed)
                {
                    #region SAFE-GUARD CODE: PREVENT INFINITE LOOP

                    // The processor indicated it had completed reprocessing in a previous run. The entry should have been removed and this block of code should never be called.
                    // However, we have seen ReprocessStudy entries that mysteriously contain rows in the WorkQueueUid table.
                    // The rows prevent the entry from being removed from the database, so the ReprocessStudy keeps repeating itself.


                    // update the state first, increment the CompleteAttemptCount
                    _queueData.State.ExecuteAtLeastOnce = true;
                    _queueData.State.Completed          = true;
                    _queueData.State.CompleteAttemptCount++;
                    SaveState(item, _queueData);

                    if (_queueData.State.CompleteAttemptCount < 10)
                    {
                        // maybe there was a db error in a previous attempt to remove the entry. Let's try again.
                        Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} but it was already completed!!!", StorageLocation.StudyInstanceUid);
                        PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                    }
                    else
                    {
                        // we are definitely stuck.
                        Platform.Log(LogLevel.Error, "ReprocessStudy {0} for study {1} appears stuck. Aborting it.", item.Key, StorageLocation.StudyInstanceUid);
                        item.FailureDescription = "This entry had completed but could not be removed.";
                        PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
                    }

                    return;

                    #endregion
                }

                if (Study == null)
                {
                    Platform.Log(LogLevel.Info,
                                 "Resuming Reprocessing study {0} on Partition {1}", StorageLocation.StudyInstanceUid,
                                 ServerPartition.Description);
                }
                else
                {
                    Platform.Log(LogLevel.Info,
                                 "Resuming Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                                 Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                                 Study.AccessionNumber, ServerPartition.Description);
                }
            }

            StudyXml studyXml = LoadStudyXml();

            int reprocessedCounter = 0;
            var removedFiles       = new List <FileInfo>();
            try
            {
                // Traverse the directories, process 500 files at a time
                FileProcessor.Process(StorageLocation.GetStudyPath(), "*.*",
                                      delegate(string path, out bool cancel)
                {
                    #region Reprocess File

                    var file = new FileInfo(path);

                    // ignore all files except those ending ".dcm"
                    // ignore "bad(0).dcm" files too
                    if (Regex.IsMatch(file.Name.ToUpper(), "[0-9]+\\.DCM$"))
                    {
                        try
                        {
                            var dicomFile = new DicomFile(path);
                            dicomFile.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);

                            string seriesUid   = dicomFile.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty);
                            string instanceUid = dicomFile.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty);
                            if (studyXml.Contains(seriesUid, instanceUid))
                            {
                                if (!seriesMap.ContainsKey(seriesUid))
                                {
                                    seriesMap.Add(seriesUid, new List <string>());
                                }
                                if (!seriesMap[seriesUid].Contains(instanceUid))
                                {
                                    seriesMap[seriesUid].Add(instanceUid);
                                }
                                else
                                {
                                    Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                                }
                            }
                            else
                            {
                                Platform.Log(ServerPlatform.InstanceLogLevel, "Reprocessing SOP {0} for study {1}", instanceUid, StorageLocation.StudyInstanceUid);
                                string groupId          = ServerHelper.GetUidGroup(dicomFile, StorageLocation.ServerPartition, WorkQueueItem.InsertTime);
                                ProcessingResult result = processor.ProcessFile(groupId, dicomFile, studyXml, true, false, null, null);
                                switch (result.Status)
                                {
                                case ProcessingStatus.Success:
                                    reprocessedCounter++;
                                    if (!seriesMap.ContainsKey(seriesUid))
                                    {
                                        seriesMap.Add(seriesUid, new List <string>());
                                    }

                                    if (!seriesMap[seriesUid].Contains(instanceUid))
                                    {
                                        seriesMap[seriesUid].Add(instanceUid);
                                    }
                                    else
                                    {
                                        Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                                    }
                                    break;

                                case ProcessingStatus.Reconciled:
                                    Platform.Log(LogLevel.Warn, "SOP was unexpectedly reconciled on reprocess SOP {0} for study {1}. It will be removed from the folder.", instanceUid, StorageLocation.StudyInstanceUid);
                                    failureDescription = String.Format("SOP Was reconciled: {0}", instanceUid);

                                    // Added for #10620 (Previously we didn't do anything here)
                                    // Because we are reprocessing files in the study folder, when a file needs to be reconciled it is copied to the Reconcile folder.
                                    // Therefore, we need to delete the one in the study folder. Otherwise, there will be a problem when the SIQ entry is reconciled.
                                    // InstanceAlreadyExistsException will also be thrown by the SopInstanceProcessor if this ReprocessStudy WQI
                                    // resumes and reprocesses the same file again.
                                    // Note: we are sure that the file has been copied to the Reconcile folder and there's no way back.
                                    // We must get rid of this file in the study folder.
                                    FileUtils.Delete(path);

                                    // Special handling: if the file is one which we're supposed to reprocess at the end (see ProcessAdditionalFiles), we must remove the file from the list
                                    if (_additionalFilesToProcess != null && _additionalFilesToProcess.Contains(path))
                                    {
                                        _additionalFilesToProcess.Remove(path);
                                    }

                                    break;
                                }
                            }
                        }
                        catch (DicomException ex)
                        {
                            // TODO: should we fail the reprocess instead? Deleting a DICOM file can lead to an incomplete study.
                            removedFiles.Add(file);
                            Platform.Log(LogLevel.Warn, "Skip reprocessing and delete {0}: Not readable.", path);
                            FileUtils.Delete(path);
                            failureDescription = ex.Message;
                        }
                    }
                    else if (!file.Extension.Equals(".xml") && !file.Extension.Equals(".gz"))
                    {
                        // not a ".dcm" or header file, delete it
                        removedFiles.Add(file);
                        FileUtils.Delete(path);
                    }

                    #endregion

                    cancel = reprocessedCounter >= 500;
                }, true);

                if (studyXml != null)
                {
                    EnsureConsistentObjectCount(studyXml, seriesMap);
                    SaveStudyXml(studyXml);
                }

                // Completed if either all files have been reprocessed
                // or no reprocessable DICOM files are left.
                _completed = reprocessedCounter == 0;
            }
            catch (Exception e)
            {
                successful         = false;
                failureDescription = e.Message;
                Platform.Log(LogLevel.Error, e, "Unexpected exception when reprocessing study: {0}", StorageLocation.StudyInstanceUid);
                Platform.Log(LogLevel.Error, "Study may be in invalid unprocessed state.  Study location: {0}", StorageLocation.GetStudyPath());
                throw;
            }
            finally
            {
                LogRemovedFiles(removedFiles);

                // Update the state
                _queueData.State.ExecuteAtLeastOnce = true;
                _queueData.State.Completed          = _completed;
                _queueData.State.CompleteAttemptCount++;
                SaveState(item, _queueData);

                if (!successful)
                {
                    FailQueueItem(item, failureDescription);
                }
                else
                {
                    if (!_completed)
                    {
                        // Put it back to Pending
                        PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
                    }
                    else
                    {
                        LogHistory();

                        // Run Study / Series Rules Engine.
                        var engine = new StudyRulesEngine(StorageLocation, ServerPartition);
                        engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);

                        // Log the FilesystemQueue related entries
                        StorageLocation.LogFilesystemQueue();

                        PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                        Platform.Log(LogLevel.Info, "Completed reprocessing of study {0} on partition {1}", StorageLocation.StudyInstanceUid, ServerPartition.Description);
                    }
                }
            }
        }
        /// <summary>
        /// Process a <see cref="WorkQueue"/> item of type AutoRoute.
        /// </summary>
        protected override void ProcessItem(Model.WorkQueue item)
        {
            if (WorkQueueItem.ScheduledTime >= WorkQueueItem.ExpirationTime && !HasPendingItems)
            {
                Platform.Log(LogLevel.Debug, "Removing Idle {0} entry : {1}", item.WorkQueueTypeEnum, item.GetKey().Key);
                base.PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.None);
                return;
            }

            if (!HasPendingItems)
            {
                // nothing to process, change to idle state
                PostProcessing(item, WorkQueueProcessorStatus.Idle, WorkQueueProcessorDatabaseUpdate.None);
                return;
            }

            Platform.Log(LogLevel.Info,
                         "Moving study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} to {5}...",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber,
                         ServerPartition.Description, DestinationDevice.AeTitle);

            // Load remote device information from the database.
            Device device = DestinationDevice;

            if (device == null)
            {
                item.FailureDescription = String.Format("Unknown auto-route destination \"{0}\"", item.DeviceKey);
                Platform.Log(LogLevel.Error, item.FailureDescription);

                PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal); // Fatal Error
                return;
            }

            if (device.Dhcp && device.IpAddress.Length == 0)
            {
                item.FailureDescription = String.Format("Auto-route destination is a DHCP device with no known IP address: \"{0}\"", device.AeTitle);
                Platform.Log(LogLevel.Error,
                             item.FailureDescription);

                PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal); // Fatal error
                return;
            }


            // Now setup the StorageSCU component
            int sendCounter = 0;

            using (ImageServerStorageScu scu = new ImageServerStorageScu(ServerPartition, device))
            {
                using (ServerExecutionContext context = new ServerExecutionContext())
                    // set the preferred syntax lists
                    scu.LoadPreferredSyntaxes(context.ReadContext);

                // Load the Instances to Send into the SCU component
                scu.AddStorageInstanceList(InstanceList);

                // Set an event to be called when each image is transferred
                scu.ImageStoreCompleted += delegate(Object sender, StorageInstance instance)
                {
                    if (instance.SendStatus.Status == DicomState.Success ||
                        instance.SendStatus.Status == DicomState.Warning ||
                        instance.SendStatus.Equals(DicomStatuses.SOPClassNotSupported))
                    {
                        sendCounter++;
                        OnInstanceSent(instance);
                    }

                    if (instance.SendStatus.Status == DicomState.Failure)
                    {
                        scu.FailureDescription = instance.SendStatus.Description;
                        if (false == String.IsNullOrEmpty(instance.ExtendedFailureDescription))
                        {
                            scu.FailureDescription = String.Format("{0} [{1}]", scu.FailureDescription,
                                                                   instance.ExtendedFailureDescription);
                        }
                    }


                    if (CancelPending && !(this is WebMoveStudyItemProcessor) && !scu.Canceled)
                    {
                        Platform.Log(LogLevel.Info, "Auto-route canceled due to shutdown for study: {0}",
                                     StorageLocation.StudyInstanceUid);
                        item.FailureDescription = "Operation was canceled due to server shutdown request.";
                        scu.Cancel();
                    }
                };

                try
                {
                    // Block until send is complete
                    scu.Send();

                    // Join, waiting for the thread to exit
                    scu.Join();
                }
                catch (Exception ex)
                {
                    Platform.Log(LogLevel.Error, ex, "Error occurs while sending images to {0} : {1}", device.AeTitle, ex.Message);
                }
                finally
                {
                    if (scu.FailureDescription.Length > 0)
                    {
                        item.FailureDescription = scu.FailureDescription;
                        scu.Status = ScuOperationStatus.Failed;
                    }

                    // Reset the WorkQueue entry status
                    if ((InstanceList.Count > 0 && sendCounter != InstanceList.Count) ||                  // not all SOPs were sent
                        scu.Status == ScuOperationStatus.Failed ||
                        scu.Status == ScuOperationStatus.ConnectFailed)
                    {
                        PostProcessingFailure(item, WorkQueueProcessorFailureType.NonFatal);                         // failures occurred
                    }
                    else
                    {
                        OnComplete();
                    }
                }
            }
        }
Example #18
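 /// <summary>
 /// Processes any additional files before performing the standard post-processing of the <see cref="WorkQueue"/> item.
 /// </summary>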
 protected override void PostProcessing(Model.WorkQueue item, WorkQueueProcessorStatus status, WorkQueueProcessorDatabaseUpdate resetQueueStudyState)
 {
     ProcessAdditionalFiles();
     base.PostProcessing(item, status, resetQueueStudyState);
 }
Example #19
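        /// <summary>
        /// Builds a <see cref="WebMoveStudyWorkQueueDetails"/> object for a WebMoveStudy <see cref="WorkQueue"/>
        /// item, including the destination AE title, pending instance/series counts, and study details when available.
        /// </summary>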
        private static WorkQueueDetails CreateWebMoveStudyWorkQueueItemDetails(Model.WorkQueue item)
        {
            var          deviceAdaptor       = new DeviceDataAdapter();
            var          studyStorageAdaptor = new StudyStorageAdaptor();
            StudyStorage studyStorage        = studyStorageAdaptor.Get(item.StudyStorageKey);
            var          wqUidsAdaptor       = new WorkQueueUidAdaptor();
            var          studyAdaptor        = new StudyAdaptor();
            Device       dest = deviceAdaptor.Get(item.DeviceKey);

            var detail = new WebMoveStudyWorkQueueDetails();

            detail.Key = item.GetKey();

            detail.DestinationAE     = dest == null ? string.Empty : dest.AeTitle;
            detail.StudyInstanceUid  = studyStorage == null ? string.Empty : studyStorage.StudyInstanceUid;
            detail.ScheduledDateTime = item.ScheduledTime;
            detail.ExpirationTime    = item.ExpirationTime;
            detail.InsertTime        = item.InsertTime;
            detail.FailureCount      = item.FailureCount;
            detail.Type               = item.WorkQueueTypeEnum;
            detail.Status             = item.WorkQueueStatusEnum;
            detail.Priority           = item.WorkQueuePriorityEnum;
            detail.ServerDescription  = item.ProcessorID;
            detail.FailureDescription = item.FailureDescription;

            StudyStorageLocation storage = WorkQueueController.GetLoadStorageLocation(item);

            detail.StorageLocationPath = storage.GetStudyPath();

            // Fetch UIDs
            var uidCriteria = new WorkQueueUidSelectCriteria();

            uidCriteria.WorkQueueKey.EqualTo(item.GetKey());
            IList <WorkQueueUid> uids = wqUidsAdaptor.Get(uidCriteria);

            var mapSeries = new Hashtable();

            foreach (WorkQueueUid uid in uids)
            {
                if (mapSeries.ContainsKey(uid.SeriesInstanceUid) == false)
                {
                    mapSeries.Add(uid.SeriesInstanceUid, uid.SopInstanceUid);
                }
            }

            detail.NumInstancesPending = uids.Count;
            detail.NumSeriesPending    = mapSeries.Count;


            // Fetch the study and patient info
            if (studyStorage != null)
            {
                var studycriteria = new StudySelectCriteria();
                studycriteria.StudyInstanceUid.EqualTo(studyStorage.StudyInstanceUid);
                studycriteria.ServerPartitionKey.EqualTo(item.ServerPartitionKey);
                Study study = studyAdaptor.GetFirst(studycriteria);

                // Study may not be available until the images are processed.
                if (study != null)
                {
                    var studyAssembler = new StudyDetailsAssembler();
                    detail.Study = studyAssembler.CreateStudyDetail(study);
                }
            }

            return(detail);
        }
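        /// <summary>
        /// Deletes the series referenced by the <see cref="WorkQueueUid"/> list from the study XML, the
        /// filesystem, and the database inside a single <see cref="ServerCommandProcessor"/>, so the whole
        /// operation can be rolled back as a unit.
        /// </summary>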
        private void ProcessSeriesLevelDelete(Model.WorkQueue item)
        {
            // ensure the Study is loaded.
            Study study = StorageLocation.Study;

            Platform.CheckForNullReference(study, "Study record doesn't exist");

            Platform.Log(LogLevel.Info, "Processing Series Level Deletion for Study {0}, A#: {1}",
                         study.StudyInstanceUid, study.AccessionNumber);

            _seriesToDelete = new List <Series>();
            bool completed = false;

            try
            {
                // Load the list of Series to be deleted from the WorkQueueUid
                LoadUids(item);

                // Go through the list of series and add commands
                // to delete each of them. It's all or nothing.
                using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting Series from study {0}, A#:{1}, Patient: {2}, ID:{3}", study.StudyInstanceUid, study.AccessionNumber, study.PatientsName, study.PatientId)))
                {
                    StudyXml studyXml = StorageLocation.LoadStudyXml();
                    IDictionary <string, Series> existingSeries = StorageLocation.Study.Series;


                    // Add commands to delete the folders and update the xml
                    foreach (WorkQueueUid uid in WorkQueueUidList)
                    {
                        // Delete from study XML
                        if (studyXml.Contains(uid.SeriesInstanceUid))
                        {
                            var xmlUpdate = new RemoveSeriesFromStudyXml(studyXml, uid.SeriesInstanceUid);
                            processor.AddCommand(xmlUpdate);
                        }

                        // Delete from filesystem
                        // Note: DeleteDirectoryCommand doesn't throw an exception if the folder doesn't exist
                        string path = StorageLocation.GetSeriesPath(uid.SeriesInstanceUid);
                        if (Directory.Exists(path))
                        {
                            var delDir = new DeleteDirectoryCommand(path, true);
                            processor.AddCommand(delDir);
                        }
                    }

                    // flush the updated xml to disk
                    processor.AddCommand(new SaveXmlCommand(studyXml, StorageLocation));



                    // Update the db.. NOTE: these commands are executed at the end.
                    foreach (WorkQueueUid uid in WorkQueueUidList)
                    {
                        // Delete from DB
                        WorkQueueUid queueUid = uid;
                        Series theSeries;

                        // Use TryGetValue so a series missing from the Study record doesn't throw
                        existingSeries.TryGetValue(queueUid.SeriesInstanceUid, out theSeries);
                        if (theSeries != null)
                        {
                            _seriesToDelete.Add(theSeries);
                            var delSeries = new DeleteSeriesFromDBCommand(StorageLocation, theSeries);
                            processor.AddCommand(delSeries);
                            delSeries.Executing += DeleteSeriesFromDbExecuting;
                        }
                        else
                        {
                            // Series doesn't exist
                            Platform.Log(LogLevel.Info, "Series {0} is invalid or no longer exists", uid.SeriesInstanceUid);
                        }

                        // The WorkQueueUid must be cleared before the entry can be removed from the queue
                        var deleteUid = new DeleteWorkQueueUidCommand(uid);
                        processor.AddCommand(deleteUid);

                        // Force a re-archival if necessary
                        processor.AddCommand(new InsertArchiveQueueCommand(item.ServerPartitionKey, item.StudyStorageKey));
                    }

                    if (!processor.Execute())
                    {
                        throw new ApplicationException(
                                  String.Format("Error occurred when deleting series from Study {0}, A#: {1}",
                                                study.StudyInstanceUid, study.AccessionNumber), processor.FailureException);
                    }
                    else
                    {
                        foreach (Series series in _seriesToDelete)
                        {
                            OnSeriesDeleted(series);
                        }
                    }
                }


                completed = true;
            }
            finally
            {
                if (completed)
                {
                    OnCompleted();
                    PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                }
                else
                {
                    PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
                }
            }
        }
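
        // ProcessSeriesLevelDelete above relies on the ServerCommandProcessor pattern: commands are
        // queued and then executed as a unit, with FailureException exposing the cause if the batch
        // fails. A minimal sketch of that pattern for a single series, using the same command types
        // shown above (the method name and parameters are illustrative, not part of the original):
        private static void DeleteSingleSeries(StudyXml studyXml, string seriesUid, string seriesPath, StudyStorageLocation location)
        {
            using (var processor = new ServerCommandProcessor("Delete a single series"))
            {
                // Remove the series entry from the study XML.
                processor.AddCommand(new RemoveSeriesFromStudyXml(studyXml, seriesUid));

                // Delete the series folder; 'true' requests a recursive delete.
                processor.AddCommand(new DeleteDirectoryCommand(seriesPath, true));

                // Persist the updated study XML back to the storage location.
                processor.AddCommand(new SaveXmlCommand(studyXml, location));

                if (!processor.Execute())
                    throw new ApplicationException("Series delete failed", processor.FailureException);
            }
        }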
        private void PreResetConfirmDialog_Confirmed(object data)
        {
            Hide();

            var key = data as ServerEntityKey;

            if (key != null)
            {
                var             adaptor = new WorkQueueAdaptor();
                Model.WorkQueue item    = adaptor.Get(key);
                if (item == null)
                {
                    String errorMessage = SR.WorkQueueNotAvailable;
                    EventsHelper.Fire(Error, this, new WorkQueueItemResetErrorEventArgs(errorMessage, null));
                }
                else
                {
                    var      controller    = new WorkQueueController();
                    DateTime scheduledTime = item.ScheduledTime;
                    if (scheduledTime < Platform.Time)
                    {
                        scheduledTime = Platform.Time.AddSeconds(WorkQueueSettings.Default.WorkQueueProcessDelaySeconds);
                    }

                    DateTime expirationTime = item.ExpirationTime.GetValueOrDefault(Platform.Time.AddSeconds(WorkQueueSettings.Default.WorkQueueExpireDelaySeconds));
                    if (expirationTime < scheduledTime)
                    {
                        expirationTime = scheduledTime.AddSeconds(WorkQueueSettings.Default.WorkQueueExpireDelaySeconds);
                    }

                    try
                    {
                        var items = new List <Model.WorkQueue>();
                        items.Add(item);

                        controller.ResetWorkQueueItems(items, scheduledTime, expirationTime);

                        Platform.Log(LogLevel.Info, "{0} Work Queue item reset:  Key={1}.", item.WorkQueueTypeEnum,
                                     item.GetKey());
                        if (WorkQueueItemReseted != null)
                        {
                            WorkQueueItemReseted(item);
                        }

                        if (OnHide != null)
                        {
                            OnHide();
                        }
                    }
                    catch (Exception e)
                    {
                        Platform.Log(LogLevel.Error, e, "Unable to reset {0} work queue item. Key={1}.",
                                     item.WorkQueueTypeEnum, item.GetKey());

                        String errorMessage = String.Format(SR.WorkQueueResetFailed, e.Message);

                        EventsHelper.Fire(Error, this, new WorkQueueItemResetErrorEventArgs(errorMessage, e));
                    }
                }
            }
        }
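
        // Worked example of the rescheduling logic in PreResetConfirmDialog_Confirmed above
        // (the setting values are illustrative assumptions, not the shipped defaults):
        // with WorkQueueProcessDelaySeconds = 60 and WorkQueueExpireDelaySeconds = 300, an item
        // whose ScheduledTime is already in the past is rescheduled to now + 60 s; if its stored
        // ExpirationTime then falls before that new ScheduledTime, the expiration is pushed out to
        // the new ScheduledTime + 300 s, so the reset entry always gets a non-empty processing window.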
Example #22
        public WorkQueueDetailsButtonEventArg(Model.WorkQueue item)
        {
            WorkQueueItem = item;
        }
Example #23
        void DeleteWorkQueueDialog_WorkQueueItemDeleted(Model.WorkQueue item)
        {
            SearchPanel.Refresh();
        }
        /// <summary>
        /// The processing thread.
        /// </summary>
        /// <remarks>
        /// This method queries the database for WorkQueue entries to work on, and then uses
        /// a thread pool to process the entries.
        /// </remarks>
        public void Run()
        {
            // Force the alert to be displayed right away, if it happens
            DateTime lastLog = Platform.Time.AddMinutes(-61);

            if (!_threadPool.Active)
            {
                _threadPool.Start();
            }

            Platform.Log(LogLevel.Info, "Work Queue Processor running...");

            while (true)
            {
                if (_stop)
                {
                    return;
                }

                bool threadsAvailable = _threadPool.CanQueueItem;
                bool memoryAvailable  = WorkQueueSettings.Instance.WorkQueueMinimumFreeMemoryMB == 0
                                        ||
                                        SystemResources.GetAvailableMemory(SizeUnits.Megabytes) >
                                        WorkQueueSettings.Instance.WorkQueueMinimumFreeMemoryMB;

                if (threadsAvailable && memoryAvailable)
                {
                    try
                    {
                        Model.WorkQueue queueListItem = GetWorkQueueItem(ServerPlatform.ProcessorId);
                        if (queueListItem == null)
                        {
                            /* No result found, or reached the max queue entries for each type */
                            _terminateEvent.WaitOne(WorkQueueSettings.Instance.WorkQueueQueryDelay, false);
                            continue;
                        }

                        if (!_extensions.ContainsKey(queueListItem.WorkQueueTypeEnum))
                        {
                            Platform.Log(LogLevel.Error,
                                         "No extensions loaded for WorkQueue item type: {0}.  Failing item.",
                                         queueListItem.WorkQueueTypeEnum);

                            //Just fail the WorkQueue item, not much else we can do
                            FailQueueItem(queueListItem, "No plugin to handle WorkQueue type: " + queueListItem.WorkQueueTypeEnum);
                            continue;
                        }

                        try
                        {
                            IWorkQueueProcessorFactory factory   = _extensions[queueListItem.WorkQueueTypeEnum];
                            IWorkQueueItemProcessor    processor = factory.GetItemProcessor();

                            // Enqueue the actual processing of the item to the thread pool.
                            _threadPool.Enqueue(processor, queueListItem, ExecuteProcessor);
                        }
                        catch (Exception e)
                        {
                            Platform.Log(LogLevel.Error, e, "Unexpected exception creating WorkQueue processor.");
                            FailQueueItem(queueListItem, "Failure getting WorkQueue processor: " + e.Message);
                            continue;
                        }
                    }
                    catch (Exception e)
                    {
                        // Wait for 3 seconds before querying again
                        Platform.Log(LogLevel.Error, e, "Exception occurred when processing WorkQueue item.");
                        _terminateEvent.WaitOne(3000, false);
                    }
                }
                else
                {
                    if ((lastLog.AddMinutes(60) < Platform.Time) && !memoryAvailable)
                    {
                        lastLog = Platform.Time;
                        Platform.Log(LogLevel.Error, "Unable to process WorkQueue entries, Minimum memory not available, minimum MB required: {0}, current MB available:{1}",
                                     WorkQueueSettings.Instance.WorkQueueMinimumFreeMemoryMB,
                                     SystemResources.GetAvailableMemory(SizeUnits.Megabytes));

                        ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Critical, "WorkQueue", AlertTypeCodes.NoResources,
                                             null, TimeSpan.Zero,
                                             "Unable to process WorkQueue entries, Minimum memory not available, minimum MB required: {0}, current MB available:{1}",
                                             WorkQueueSettings.Instance.WorkQueueMinimumFreeMemoryMB,
                                             SystemResources.GetAvailableMemory(SizeUnits.Megabytes));
                    }
                    // wait for new opening in the pool or termination
                    WaitHandle.WaitAny(new WaitHandle[] { _threadStop, _terminateEvent }, 3000, false);
                    _threadStop.Reset();
                }
            }
        }
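
        // The gate at the top of the polling loop combines thread-pool availability with a free-memory
        // check; a configured minimum of 0 MB disables the memory check entirely. A minimal sketch of
        // that check factored into a helper (the method name is an assumption, not part of the original):
        private static bool HasSufficientMemory()
        {
            var minimumMb = WorkQueueSettings.Instance.WorkQueueMinimumFreeMemoryMB;

            // 0 means "no minimum configured", so the check always passes.
            return minimumMb == 0
                   || SystemResources.GetAvailableMemory(SizeUnits.Megabytes) > minimumMb;
        }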
Example #25
        protected override void ProcessItem(Model.WorkQueue item)
        {
            LoadUids(item);

            if (WorkQueueUidList.Count == 0)
            {
                // No UIDs associated with the WorkQueue item.  Set the status back to idle
                PostProcessing(item,
                               WorkQueueProcessorStatus.Idle,
                               WorkQueueProcessorDatabaseUpdate.ResetQueueState);
                return;
            }


            XmlElement element = item.Data.DocumentElement;

            string syntax = element.Attributes["syntax"].Value;

            TransferSyntax compressSyntax = TransferSyntax.GetTransferSyntax(syntax);

            if (compressSyntax == null)
            {
                item.FailureDescription =
                    String.Format("Invalid transfer syntax in compression WorkQueue item: {0}", element.Attributes["syntax"].Value);
                Platform.Log(LogLevel.Error, "Error with work queue item {0}: {1}", item.GetKey(), item.FailureDescription);
                base.PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
                return;
            }

            if (Study == null)
            {
                item.FailureDescription = "Compression item does not have a linked Study record";
                Platform.Log(LogLevel.Error, "Error with work queue item {0}: {1}", item.GetKey(), item.FailureDescription);
                base.PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
                return;
            }

            Platform.Log(LogLevel.Info,
                         "Compressing study {0} for Patient {1} (PatientId:{2} A#:{3}) on partition {4} to {5}",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                         Study.AccessionNumber, ServerPartition.Description, compressSyntax.Name);

            IDicomCodecFactory[] codecs          = DicomCodecRegistry.GetCodecFactories();
            IDicomCodecFactory   theCodecFactory = null;

            foreach (IDicomCodecFactory codec in codecs)
            {
                if (codec.CodecTransferSyntax.Equals(compressSyntax))
                {
                    theCodecFactory = codec;
                    break;
                }
            }

            if (theCodecFactory == null)
            {
                item.FailureDescription = String.Format("Unable to find codec for compression: {0}", compressSyntax.Name);
                Platform.Log(LogLevel.Error, "Error with work queue item {0}: {1}", item.GetKey(), item.FailureDescription);
                base.PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
                return;
            }

            if (!ProcessUidList(item, theCodecFactory))
            {
                PostProcessingFailure(item, WorkQueueProcessorFailureType.NonFatal);
            }
            else
            {
                Platform.Log(LogLevel.Info,
                             "Completed Compressing study {0} for Patient {1} (PatientId:{2} A#:{3}) on partition {4} to {5}",
                             Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                             Study.AccessionNumber, ServerPartition.Description, compressSyntax.Name);


                if (compressSyntax.LossyCompressed)
                {
                    UpdateStudyStatus(StorageLocation, StudyStatusEnum.OnlineLossy, compressSyntax);
                }
                else
                {
                    UpdateStudyStatus(StorageLocation, StudyStatusEnum.OnlineLossless, compressSyntax);
                }

                PostProcessing(item,
                               WorkQueueProcessorStatus.Pending,
                               WorkQueueProcessorDatabaseUpdate.None);                 // batch processed, not complete
            }
        }
        /// <summary>
        /// Simple routine for failing a work queue item.
        /// </summary>
        /// <param name="item">The item to fail.</param>
        /// <param name="failureDescription">The reason for the failure.</param>
        private void FailQueueItem(Model.WorkQueue item, string failureDescription)
        {
            // Must retry to reset the status of the entry in case of a db error.
            // Failure to do so will create a stale work queue entry (stuck in the "In Progress" state)
            // which can only be recovered by restarting the service.
            while (true)
            {
                try
                {
                    WorkQueueTypeProperties prop = _propertiesDictionary[item.WorkQueueTypeEnum];
                    using (IUpdateContext updateContext = _store.OpenUpdateContext(UpdateContextSyncMode.Flush))
                    {
                        IUpdateWorkQueue          update = updateContext.GetBroker <IUpdateWorkQueue>();
                        UpdateWorkQueueParameters parms  = new UpdateWorkQueueParameters
                        {
                            ProcessorID        = ServerPlatform.ProcessorId,
                            WorkQueueKey       = item.GetKey(),
                            StudyStorageKey    = item.StudyStorageKey,
                            FailureCount       = item.FailureCount + 1,
                            FailureDescription = failureDescription
                        };

                        var settings = WorkQueueSettings.Instance;
                        if ((item.FailureCount + 1) > prop.MaxFailureCount)
                        {
                            Platform.Log(LogLevel.Error,
                                         "Failing {0} WorkQueue entry ({1}), reached max retry count of {2}. Failure Reason: {3}",
                                         item.WorkQueueTypeEnum, item.GetKey(), item.FailureCount + 1, failureDescription);
                            parms.WorkQueueStatusEnum = WorkQueueStatusEnum.Failed;
                            parms.ScheduledTime       = Platform.Time;
                            parms.ExpirationTime      = Platform.Time.AddDays(1);

                            OnWorkQueueEntryFailed(item, failureDescription);
                        }
                        else
                        {
                            Platform.Log(LogLevel.Error,
                                         "Resetting {0} WorkQueue entry ({1}) to Pending, current retry count {2}. Failure Reason: {3}",
                                         item.WorkQueueTypeEnum, item.GetKey(), item.FailureCount + 1, failureDescription);
                            parms.WorkQueueStatusEnum = WorkQueueStatusEnum.Pending;
                            parms.ScheduledTime       = Platform.Time.AddMilliseconds(settings.WorkQueueQueryDelay);
                            parms.ExpirationTime      =
                                Platform.Time.AddSeconds((prop.MaxFailureCount - item.FailureCount) *
                                                         prop.FailureDelaySeconds);
                        }

                        if (false == update.Execute(parms))
                        {
                            Platform.Log(LogLevel.Error, "Unable to update {0} WorkQueue GUID: {1}", item.WorkQueueTypeEnum,
                                         item.GetKey().ToString());
                        }
                        else
                        {
                            updateContext.Commit();
                            break; // done
                        }
                    }
                }
                catch (Exception ex)
                {
                    Platform.Log(LogLevel.Error, "Error occurred when calling FailQueueItem. Retry later. {0}", ex.Message);
                    _terminateEvent.WaitOne(2000, false);
                    if (_stop)
                    {
                        Platform.Log(LogLevel.Warn, "Service is stopping. Aborting the retry to fail the entry.");
                        break;
                    }
                }
            }
        }
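
        // Worked example of the retry scheduling in FailQueueItem above (the per-type numbers are
        // illustrative assumptions): with MaxFailureCount = 3 and FailureDelaySeconds = 180,
        //   1st failure (FailureCount = 0): entry reset to Pending, expires (3 - 0) * 180 = 540 s later
        //   3rd failure (FailureCount = 2): entry reset to Pending, expires (3 - 2) * 180 = 180 s later
        //   4th failure (FailureCount = 3): 3 + 1 > 3, so the entry is marked Failed and expires in one day
        // Note the processing window shrinks as failures accumulate, until the entry finally fails outright.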
Example #27
        public override void DataBind()
        {
            if (WorkQueueItemKey != null)
            {
                var adaptor = new WorkQueueAdaptor();
                _workQueue = adaptor.Get(WorkQueueItemKey);

                WorkQueueItemDetailsPanel.WorkQueue = _workQueue;

                if (_workQueue == null)
                {
                    if (!ItemNotAvailableAlertShown)
                    {
                        MessageBox.Message = SR.WorkQueueNotAvailable;
                        MessageBox.MessageType =
                            MessageBox.MessageTypeEnum.ERROR;
                        ItemNotAvailableAlertShown = true;
                    }
                }
            }
            else
            {
                ExceptionHandler.ThrowException(new WorkQueueItemNotFoundException());
            }

            base.DataBind();
        }
        private void OnWorkQueueEntryFailed(Model.WorkQueue item, string error)
        {
            RaiseAlert(item, AlertLevel.Error, error);
        }

        public WorkQueueProcessorContext(Model.WorkQueue item)
            : base(item.GetKey().Key.ToString())
        {
            _item = item;
        }
Example #30
        private void PreDeleteConfirmDialog_Confirmed(object data)
        {
            Hide();

            var key = data as ServerEntityKey;

            if (key != null)
            {
                var             adaptor = new WorkQueueAdaptor();
                Model.WorkQueue item    = adaptor.Get(key);
                if (item == null)
                {
                    MessageBox.Message     = SR.WorkQueueNotAvailable;
                    MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
                    MessageBox.Show();
                }
                else
                {
                    if (item.WorkQueueStatusEnum == WorkQueueStatusEnum.InProgress)
                    {
                        MessageBox.Message     = SR.WorkQueueBeingProcessed_CannotDelete;
                        MessageBox.MessageType =
                            MessageBox.MessageTypeEnum.ERROR;
                        MessageBox.Show();
                        return;
                    }

                    try
                    {
                        bool successful;
                        var  controller = new WorkQueueController();
                        var  items      = new List <Model.WorkQueue>();
                        items.Add(item);

                        successful = controller.DeleteWorkQueueItems(items);
                        if (successful)
                        {
                            Platform.Log(LogLevel.Info, "Work Queue item deleted by user : Item Key={0}",
                                         item.GetKey().Key);

                            if (WorkQueueItemDeleted != null)
                            {
                                WorkQueueItemDeleted(item);
                            }

                            if (OnHide != null)
                            {
                                OnHide();
                            }
                        }
                        else
                        {
                            Platform.Log(LogLevel.Error,
                                         "PreDeleteConfirmDialog_Confirmed: Unable to delete work queue item. GUID={0}",
                                         item.GetKey().Key);

                            MessageBox.Message     = SR.WorkQueueDeleteFailed;
                            MessageBox.MessageType =
                                MessageBox.MessageTypeEnum.ERROR;
                            MessageBox.Show();
                        }
                    }
                    catch (Exception e)
                    {
                        Platform.Log(LogLevel.Error,
                                     "PreDeleteConfirmDialog_Confirmed: Unable to delete work queue item. GUID={0} : {1}",
                                     item.GetKey().Key, e.StackTrace);

                        MessageBox.Message     = String.Format(SR.WorkQueueDeleteFailed_WithException, e.Message);
                        MessageBox.MessageType = MessageBox.MessageTypeEnum.ERROR;
                        MessageBox.Show();
                    }
                }
            }
        }