        /// <summary>
        /// Copies the error message reported for the given document, if any, onto the supplied log info.
        /// </summary>
        /// <param name="documentErrorCollection">The document error collection returned by the bulk indexing call.</param>
        /// <param name="documentDetail">The document detail.</param>
        /// <param name="logInfo">The log information to update.</param>
        private static void SetDocumentError(DocumentErrorCollection documentErrorCollection, DocumentDetail documentDetail,
                                             SearchIndexLogInfo logInfo)
        {
            if (documentErrorCollection == null || documentErrorCollection.FailedDocumentCount == 0)
            {
                return;
            }
            var documentError = documentErrorCollection.DocumentErrors.FirstOrDefault(
                d => documentDetail.document.DocumentId.Equals(d.Id, StringComparison.CurrentCultureIgnoreCase));

            if (documentError != null)
            {
                logInfo.Message = documentError.ErrorMessage;
            }
        }
        /// <summary>
        /// Sends a log entry for each native document in the batch to the log pipe.
        /// </summary>
        /// <param name="documentCollection">The document collection.</param>
        /// <param name="isSentForIndexing">if set to <c>true</c>, the documents were successfully sent for indexing.</param>
        /// <param name="documentErrorCollection">The document error collection returned by the bulk indexing call, if any.</param>
        private void SendLog(DocumentCollection documentCollection, bool isSentForIndexing, DocumentErrorCollection documentErrorCollection = null)
        {
            if (documentCollection == null || documentCollection.documents == null) return;
            var message = isSentForIndexing ? "Sent for indexing." : "Failed to send for indexing.";
            var nativeDocumentList =
                documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet);
            if (!nativeDocumentList.Any()) return;

            var searchIndexLogInfos = new List<JobWorkerLog<SearchIndexLogInfo>>();
            try
            {
                foreach (var documentDetail in nativeDocumentList)
                {
                    if (documentDetail.document == null) continue;
                    var logInfo = new SearchIndexLogInfo
                    {
                        Information =
                            string.Format("DCN:{0}", documentDetail.document.DocumentControlNumber),
                        DocumentId = documentDetail.document.DocumentId,
                        DCNNumber = documentDetail.document.DocumentControlNumber,
                        CrossReferenceField = documentDetail.document.CrossReferenceFieldValue,
                        Message = message
                    };
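                    // Attach the per-document error message reported by the bulk indexing call, if one exists.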
                    SetDocumentError(documentErrorCollection, documentDetail, logInfo);
                    if (String.IsNullOrEmpty(documentDetail.CorrelationId))
                    {
                        documentDetail.CorrelationId = "0";
                    }
                    var searchIndexLogInfo = new JobWorkerLog<SearchIndexLogInfo>
                    {
                        JobRunId = Convert.ToInt32(PipelineId),
                        CorrelationId = long.Parse(documentDetail.CorrelationId),
                        WorkerInstanceId = WorkerId,
                        WorkerRoleType = "8A65E2DC-753C-E311-82FA-005056850057",
                        Success = isSentForIndexing,
                        LogInfo = logInfo
                    };

                    searchIndexLogInfos.Add(searchIndexLogInfo);
                }
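                // Publish the accumulated log entries to the log pipe in a single envelope.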
                LogPipe.Open();
                var pipeMessageEnvelope = new PipeMessageEnvelope
                {
                    Body = searchIndexLogInfos
                };
                LogPipe.Send(pipeMessageEnvelope);
            }
            catch (Exception exception)
            {
                exception.AddDbgMsg("Failed to log document details");
                exception.Trace().Swallow();
                ReportToDirector(exception);
            }
        }
        /// <summary>
        /// Processes a batch of documents from the pipeline and indexes them in the search sub-system.
        /// </summary>
        /// <param name="message">The message containing the document collection to index.</param>
        protected override void ProcessMessage(PipeMessageEnvelope message)
        {
            var documentCollection = message.Body as DocumentCollection;
            if (documentCollection == null || documentCollection.documents == null)
            {
                Tracer.Warning("Indexing worker receives empty batch");
                return;
            }
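            // Holds any per-document errors reported by the bulk insert; used later to enrich the log entries.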
            var documentErrorCollection = new DocumentErrorCollection();
            try
            {
                #region Assertion
                documentCollection.ShouldNotBe(null);
                documentCollection.dataset.ShouldNotBe(null);

                #endregion

                // This check avoids a null reference exception in MatterDAO.GetMatterDetails.
                if (documentCollection.documents != null && documentCollection.documents.Any())
                {
                    var isDeleteTags = documentCollection.IsDeleteTagsForOverlay;
                    if (null == _mMatter)
                    {
                        _mMatter = documentCollection.dataset.Matter;
                    }

                    // Initialize the index manager proxy for the target matter and collection.
                    _indexManagerProxy = new IndexManagerProxy(_mMatter.FolderID, documentCollection.dataset.CollectionId);

                    using (new EVTransactionScope(TransactionScopeOption.Suppress))
                    {
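                        // Documents are processed in three groups: new documents are bulk-inserted,
                        // overlaid documents with a new content file are rebuilt and re-ingested, and
                        // overlaid documents with the same content file get a metadata/tag-only update.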
                        #region Insert

                        var nativeDocumentListInsert =
                            documentCollection.documents.FindAll(
                                n => n.docType == DocumentsetType.NativeSet && n.IsNewDocument);
                        if (nativeDocumentListInsert.Any())
                        {
                            // Add imageset identifiers to the documents before indexing.
                            AppendImagesetIdentifiers(ref nativeDocumentListInsert, documentCollection);

                            documentErrorCollection = _indexManagerProxy.BulkIndexDocuments(nativeDocumentListInsert.ToDocumentList());
                        }

                        #endregion

                        #region Update

                        #region "Is Not Same Content File

                        var nativeDocumentListUpdate =
                            documentCollection.documents.FindAll(
                                n =>
                                    n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                                    n.OverlayIsNewContentFile).ToList();

                        if (nativeDocumentListUpdate.Any())
                        {
                            //TODO: Verify whether delete and recreate is necessary in the overlay case
                            //TODO: Search Engine Replacement - Search Sub System - Implement batch delete of documents from index 
                            ConstructOverlayDocuments(documentCollection, nativeDocumentListUpdate,
                                documentCollection.IsDeleteTagsForOverlay);

                            //TODO: Search Engine Replacement - Search Sub System - Ensure appropriate annotations happens in search index
                            //TODO: Search Engine Replacement - Search Sub System - Ensure tags are updated for the documents in search index

                            AssignReviewsetIdentifiers(ref nativeDocumentListUpdate);
                            AssignImagesetIdentifiers(ref nativeDocumentListUpdate, documentCollection);
                            AssignTagIdentifiers(ref nativeDocumentListUpdate, isDeleteTags);

                            //TODO: Search Engine Replacement - Search Sub System - Implement to ingest batch of documents into search index
                            _indexManagerProxy.BulkUpdateDocumentsAsync(nativeDocumentListUpdate.ToDocumentList());
                        }

                        #endregion

                        #region Same Content File

                        var nativeDocumentListOverlayInsert =
                            documentCollection.documents.FindAll(
                                n =>
                                    n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                                    !n.OverlayIsNewContentFile).ToList();

                        if (nativeDocumentListOverlayInsert.Any())
                        {
                            var imageDocumentList =
                                documentCollection.documents.FindAll(i => i.docType == DocumentsetType.ImageSet);
                            var imagesetId = string.Empty;
                            if (imageDocumentList.Any())
                            {
                                imagesetId = imageDocumentList.First().document.CollectionId.ToLower();
                            }

                            if (documentCollection.IsIncludeNativeFile || documentCollection.IsDeleteTagsForOverlay ||
                                !string.IsNullOrWhiteSpace(imagesetId))
                            {
                                foreach (var doc in nativeDocumentListOverlayInsert)
                                {
                                    //Initializes the tag and redactable field values
                                    ResetRedactableFields(documentCollection, imagesetId, doc);
                                }
                            }

                            AssignImagesetIdentifiers(ref nativeDocumentListOverlayInsert, documentCollection);
                            AssignTagIdentifiers(ref nativeDocumentListOverlayInsert, isDeleteTags);

                            // Bulk update the overlaid documents in the search index.
                            _indexManagerProxy.BulkUpdateDocumentsAsync(
                                nativeDocumentListOverlayInsert.ToDocumentList());
                        }

                        #endregion

                        #endregion


                        #region "Update document Index status and contenet size in DB"
                        var includedDocuments =
                       documentCollection.documents.FindAll(
                           n =>
                               n.docType == DocumentsetType.NativeSet).ToList();
                        BulkUpsertDocumentContentSizeAndIndexStatusInfo(_mMatter.FolderID,
                            documentCollection.dataset.CollectionId, includedDocuments);
                        #endregion
                    }
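                    // Forward the processed batch to the next worker and log the outcome, including any per-document errors.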
                    Send(documentCollection);
                    SendLog(documentCollection, true, documentErrorCollection);
                }
            }
            catch (Exception ex)
            {
                ex.AddDbgMsg("Problem in indexing the documents in search sub system");
                ex.Trace().Swallow();
                ReportToDirector(ex);
                SendLog(documentCollection, false);
            }

            IncreaseProcessedDocumentsCount(documentCollection.documents.Count);
            // Debug
            //Tracer.Warning("Indexing worker handled {0} documents", documentCollection.documents.Count);
        }