/// <summary>
/// Processes a duplicate-SOP cleanup <see cref="Model.WorkQueue"/> entry: deletes the
/// duplicate SOP folder when there is nothing left to process, otherwise removes the
/// queued duplicate uids via <c>ProcessUidList</c>.
/// </summary>
/// <param name="item">The work queue entry being processed. Must have non-null
/// <c>Data</c> and a non-empty <c>GroupID</c>.</param>
protected override void ProcessItem(Model.WorkQueue item)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(item.Data, "Data column cannot be null");
            // NOTE(review): message says "cannot be null" but this is an empty-string check — consider "cannot be empty".
            Platform.CheckForEmptyString(item.GroupID, "GroupID column cannot be null");

            // NOTE(review): guards above check `item`, but the deserialization below reads the
            // base-class `WorkQueueItem` property — presumably the same entry; confirm.
            _reconcileQueueData = XmlUtils.Deserialize<ProcessDuplicateQueueEntryQueueData>(WorkQueueItem.Data);

            // Populates WorkQueueUidList with the pending uid entries for this item.
            LoadUids(item);


            if (WorkQueueUidList.Count == 0)
            {
                // Nothing left to process: remove the duplicate SOP folder (and its parent)
                // if they are empty, then mark the work queue entry complete.
                var dir = new DirectoryInfo(_reconcileQueueData.DuplicateSopFolder);
                DirectoryUtility.DeleteIfEmpty(dir.FullName);
                if (dir.Parent != null) 
                    DirectoryUtility.DeleteIfEmpty(dir.Parent.FullName);


                Platform.Log(LogLevel.Info, "Reconcile Cleanup is completed. GUID={0}.", WorkQueueItem.GetKey());
                PostProcessing(WorkQueueItem,
                               WorkQueueProcessorStatus.Complete,
                               WorkQueueProcessorDatabaseUpdate.ResetQueueState);
            }
            else
            {
                Platform.Log(LogLevel.Info,
                             "Starting Cleanup of Duplicate item for study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}, {5} objects",
                             Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                             Study.AccessionNumber, ServerPartition.Description,
                             WorkQueueUidList.Count);

                // All uid entries already failed -> fail the whole work queue item rather than retry.
                bool allFailed = CollectionUtils.SelectFirst(WorkQueueUidList, uid => !uid.Failed) == null;

                if (allFailed)
                {
                    FailQueueItem(item, item.FailureDescription ?? "All work queue Uid entries have failed.");
                }
                else
                {

                    Platform.Log(LogLevel.Info, "Duplicates to be removed are located in {0}", _reconcileQueueData.DuplicateSopFolder);

                    // Delete the queued duplicates; returns how many uids were removed.
                    int successCount = ProcessUidList();

                    Platform.Log(LogLevel.Info, "Complete Duplicate Cleanup. GUID={0}. {1} uids deleted.", WorkQueueItem.GetKey(), successCount);
                    // Pending: the entry is rescheduled so the empty-list branch above can run
                    // the folder cleanup on a later pass.
                    PostProcessing(WorkQueueItem, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
                }

            }
        }
        /// <summary>
        /// Initializes a new instance by copying all persisted columns from an existing
        /// <see cref="WorkQueue"/> entry and deserializing its XML <c>Data</c> column
        /// into a <see cref="ProcessDuplicateQueueEntryQueueData"/>.
        /// </summary>
        /// <param name="workQueue">The source work queue entry. Must be non-null and
        /// carry a non-null <c>Data</c> document.</param>
        public WorkQueueProcessDuplicateSop(WorkQueue workQueue)
        {
            // Fail fast with a meaningful message instead of a NullReferenceException
            // at SetKey()/Data.DocumentElement below.
            Platform.CheckForNullReference(workQueue, "workQueue");
            Platform.CheckForNullReference(workQueue.Data, "workQueue.Data");

            SetKey(workQueue.GetKey());
            Data                  = workQueue.Data;
            ExpirationTime        = workQueue.ExpirationTime;
            FailureCount          = workQueue.FailureCount;
            FailureDescription    = workQueue.FailureDescription;
            InsertTime            = workQueue.InsertTime;
            ProcessorID           = workQueue.ProcessorID;
            ScheduledTime         = workQueue.ScheduledTime;
            ServerPartitionKey    = workQueue.ServerPartitionKey;
            StudyHistoryKey       = workQueue.StudyHistoryKey;
            StudyStorageKey       = workQueue.StudyStorageKey;
            WorkQueuePriorityEnum = workQueue.WorkQueuePriorityEnum;
            WorkQueueStatusEnum   = workQueue.WorkQueueStatusEnum;
            WorkQueueTypeEnum     = workQueue.WorkQueueTypeEnum;

            // Materialize the strongly-typed queue data from the raw XML column.
            _queueData = (ProcessDuplicateQueueEntryQueueData)_serializer.Deserialize(new XmlNodeReader(workQueue.Data.DocumentElement));
        }
        /// <summary>
        /// Creates a strongly-typed copy of an existing <see cref="WorkQueue"/> entry,
        /// mirroring every persisted column and deserializing the XML <c>Data</c>
        /// column into <see cref="ProcessDuplicateQueueEntryQueueData"/>.
        /// </summary>
        /// <param name="workQueue">The work queue entry to copy from.</param>
        public WorkQueueProcessDuplicateSop(WorkQueue workQueue)
        {
            SetKey(workQueue.GetKey());

            // Identity / scheduling columns.
            InsertTime = workQueue.InsertTime;
            ScheduledTime = workQueue.ScheduledTime;
            ExpirationTime = workQueue.ExpirationTime;
            ProcessorID = workQueue.ProcessorID;

            // Status / failure bookkeeping.
            WorkQueueStatusEnum = workQueue.WorkQueueStatusEnum;
            WorkQueuePriorityEnum = workQueue.WorkQueuePriorityEnum;
            WorkQueueTypeEnum = workQueue.WorkQueueTypeEnum;
            FailureCount = workQueue.FailureCount;
            FailureDescription = workQueue.FailureDescription;

            // Foreign keys and raw payload.
            ServerPartitionKey = workQueue.ServerPartitionKey;
            StudyHistoryKey = workQueue.StudyHistoryKey;
            StudyStorageKey = workQueue.StudyStorageKey;
            Data = workQueue.Data;

            // Hydrate the typed queue data from the XML payload.
            var dataReader = new XmlNodeReader(workQueue.Data.DocumentElement);
            _queueData = (ProcessDuplicateQueueEntryQueueData)_serializer.Deserialize(dataReader);
        }
        /// <summary>
        /// Inserts work queue entry to process the duplicates.
        /// </summary>
        /// <param name="entryKey"><see cref="ServerEntityKey"/> of the <see cref="StudyIntegrityQueue"/> entry  that has <see cref="StudyIntegrityReasonEnum"/> equal to <see cref="StudyIntegrityReasonEnum.Duplicate"/> </param>
        /// <param name="action">The duplicate-resolution action to record in the new entry's queue data.</param>
        public void Process(ServerEntityKey entryKey, ProcessDuplicateAction action)
        {
            DuplicateSopReceivedQueue entry = DuplicateSopReceivedQueue.Load(HttpContextData.Current.ReadContext, entryKey);
            Platform.CheckTrue(entry.StudyIntegrityReasonEnum == StudyIntegrityReasonEnum.Duplicate, "Invalid type of entry");

            IList<StudyIntegrityQueueUid> uids = LoadDuplicateSopUid(entry);

            using (IUpdateContext context = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
                // Queue data captured on the new work queue entry.
                var queueData = new ProcessDuplicateQueueEntryQueueData
                {
                    Action = action,
                    DuplicateSopFolder = entry.GetFolderPath(context),
                    UserName = ServerHelper.CurrentUserName,
                };

                // Lock the study for reconciliation before scheduling the work.
                var lockParms = new LockStudyParameters
                {
                    QueueStudyStateEnum = QueueStudyStateEnum.ReconcileScheduled,
                    StudyStorageKey = entry.StudyStorageKey
                };

                ILockStudy lockBroker = context.GetBroker<ILockStudy>();
                lockBroker.Execute(lockParms);
                if (!lockParms.Successful)
                    throw new ApplicationException(lockParms.FailureReason);

                // Schedule the ProcessDuplicateSop work queue entry.
                IWorkQueueProcessDuplicateSopBroker broker = context.GetBroker<IWorkQueueProcessDuplicateSopBroker>();
                var columns = new WorkQueueProcessDuplicateSopUpdateColumns
                {
                    Data = XmlUtils.SerializeAsXmlDoc(queueData),
                    GroupID = entry.GroupID,
                    ScheduledTime = Platform.Time,
                    ExpirationTime = Platform.Time.Add(TimeSpan.FromMinutes(15)),
                    ServerPartitionKey = entry.ServerPartitionKey,
                    WorkQueuePriorityEnum = WorkQueuePriorityEnum.Medium,
                    StudyStorageKey = entry.StudyStorageKey,
                    WorkQueueStatusEnum = WorkQueueStatusEnum.Pending
                };

                WorkQueueProcessDuplicateSop newEntry = broker.Insert(columns);

                // Move each duplicate uid from the study integrity queue to the work queue.
                IWorkQueueUidEntityBroker workQueueUidBroker = context.GetBroker<IWorkQueueUidEntityBroker>();
                IStudyIntegrityQueueUidEntityBroker duplicateUidBroker = context.GetBroker<IStudyIntegrityQueueUidEntityBroker>();
                foreach (StudyIntegrityQueueUid uid in uids)
                {
                    var uidColumns = new WorkQueueUidUpdateColumns
                    {
                        Duplicate = true,
                        Extension = ServerPlatform.DuplicateFileExtension,
                        SeriesInstanceUid = uid.SeriesInstanceUid,
                        SopInstanceUid = uid.SopInstanceUid,
                        RelativePath = uid.RelativePath,
                        WorkQueueKey = newEntry.GetKey()
                    };

                    workQueueUidBroker.Insert(uidColumns);
                    duplicateUidBroker.Delete(uid.GetKey());
                }

                // The study integrity queue entry itself is now superseded.
                IDuplicateSopEntryEntityBroker duplicateEntryBroker =
                    context.GetBroker<IDuplicateSopEntryEntityBroker>();
                duplicateEntryBroker.Delete(entry.GetKey());

                context.Commit();
            }
        }
 /// <summary>
 /// Persists the current queue-data state (notably <c>HistoryLogged</c>) back into the
 /// work queue entry's Data column, then mirrors the flag in memory on success.
 /// </summary>
 private void UpdateQueueData()
 {
     using(IUpdateContext ctx = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
     {
         // make a copy of the current queue data with updated info
         // NOTE(review): the copy takes HistoryLogged from the property, not from the
         // in-memory queue data — presumably the property is the source of truth here; confirm.
         ProcessDuplicateQueueEntryQueueData data = new ProcessDuplicateQueueEntryQueueData
                                                        {
                                                            Action = _processDuplicateEntry.QueueData.Action,
                                                            DuplicateSopFolder = _processDuplicateEntry.QueueData.DuplicateSopFolder,
                                                            UserName = _processDuplicateEntry.QueueData.UserName,
                                                            State = new ProcessDuplicateQueueState
                                                                        {
                                                                            HistoryLogged = HistoryLogged,
                                                                            ExistingStudyUpdated = _processDuplicateEntry.QueueData.State.ExistingStudyUpdated
                                                                        }                                                                               
                                                        };
         
         // update the queue data in db
         IWorkQueueEntityBroker broker = ctx.GetBroker<IWorkQueueEntityBroker>();
         WorkQueueUpdateColumns parameters = new WorkQueueUpdateColumns
                                                 {
                                                     Data = XmlUtils.SerializeAsXmlDoc(data)
                                                 };
         if (broker.Update(WorkQueueItem.Key, parameters))
         {
             ctx.Commit();
             // Only after a successful commit do we flip both the property and the
             // in-memory queue data to true (chained assignment).
             // NOTE(review): if Update returns false this method silently does nothing —
             // presumably the caller retries on the next pass; confirm.
             HistoryLogged = _processDuplicateEntry.QueueData.State.HistoryLogged = true;
         }                
     }
 }