/// <summary>
/// Unassigns the reviewset id and binder id for the list of documents in the search index.
/// </summary>
/// <param name="documents">Documents whose reviewset membership is being removed.</param>
/// <param name="reviewset">Reviewset whose identifiers are unassigned from the documents.</param>
private void UnAssignReviewsetInSearchIndex(List<ReviewsetDocumentBEO> documents, ReviewsetRecord reviewset)
{
    const int batchSize = 1000;

    // The same field/value pairs are removed from every document, so build the dictionary once.
    var fields = new Dictionary<string, string>
    {
        { EVSystemFields.ReviewSetId, reviewset.SplitReviewSetId },
        { EVSystemFields.BinderId, reviewset.BinderId }
    };

    var indexManagerProxy = new IndexManagerProxy(reviewset.MatterId, reviewset.CollectionId);

    // Process in fixed-size batches. Take() yields only the remaining elements past the end of
    // the list, so the final short batch needs no special-case arithmetic (the original
    // while-loop duplicated that bookkeeping).
    for (var offset = 0; offset < documents.Count; offset += batchSize)
    {
        var docs = documents
            .Skip(offset)
            .Take(batchSize)
            .Select(doc => new DocumentBeo { Id = doc.DocumentId, Fields = fields })
            .ToList();
        indexManagerProxy.BulkUnAssignFields(docs);
    }
}
/// <summary>
/// Updates the binder "not reviewed" tag field for the documents in the search sub-system.
/// </summary>
/// <param name="reviewsetRecord">Reviewset record carrying the document/tag associations to push.</param>
private void UpdateTag(DocumentRecordCollection reviewsetRecord)
{
    var indexManagerProxy = new IndexManagerProxy(reviewsetRecord.ReviewsetDetails.MatterId,
        reviewsetRecord.ReviewsetDetails.CollectionId);

    // Group the incoming tag records per document id.
    var documentList = reviewsetRecord.DocumentTags
        .GroupBy(item => item.DocumentId)
        .ToDictionary(group => group.Key, group => group.ToList());

    // Build one tag-field value per document. Only active tags (Status == 1) are included;
    // string.Join never returns null, so a document with no active tags gets string.Empty,
    // which clears any previous tag value in the index. (The original null/empty ternary
    // produced the identical value in both branches and has been removed.)
    var tagsList = new Dictionary<string, KeyValuePair<string, string>>();
    foreach (var document in documentList)
    {
        var strTags = string.Join(",", document.Value
            .Where(x => x.Status == 1)
            .Select(t => String.Format(EVSearchSyntax.TagValueFormat + "{0}", t.TagId)).ToArray());
        tagsList.Add(document.Key, new KeyValuePair<string, string>(EVSystemFields.Tag, strTags));
    }

    const int batchSize = 1000;
    // Push updates in fixed-size batches; Take() clamps the final short batch automatically,
    // and an empty tagsList never enters the loop, so the old empty-batch guard is unnecessary.
    for (var offset = 0; offset < tagsList.Count; offset += batchSize)
    {
        var docs = tagsList
            .Skip(offset)
            .Take(batchSize)
            .Select(doc => new DocumentBeo
            {
                Id = doc.Key,
                Fields = new Dictionary<string, string> { { doc.Value.Key, doc.Value.Value } }
            })
            .ToList();
        indexManagerProxy.BulkUpdateDocumentsAsync(docs);
    }
}
/// <summary>
/// Constructs the production (bates / DPN) fields for each document, persists them to the
/// document vault and, when the vault write succeeds, pushes the same values to the search index.
/// </summary>
/// <param name="lstProductionDocuments">Production documents to process; forwarded downstream unchanged.</param>
private void ProcessDocumentFields(List<ProductionDocumentDetail> lstProductionDocuments)
{
    if (lstProductionDocuments.Any())
    {
        var lstBatesAndDpnFields = new List<DocumentFieldsBEO>();
        var indexDocFields = new Dictionary<string, List<KeyValuePair<string, string>>>();
        foreach (var productionDocument in lstProductionDocuments)
        {
            if (_documentDetails.ContainsKey(productionDocument.DocumentId))
            {
                productionDocument.Fields = _documentDetails[productionDocument.DocumentId];
            }
            productionDocument.DocumentFields = _documentDetails;
            var lstDocFields = ConstructDocumentFields(productionDocument);
            if (lstDocFields != null && lstDocFields.Any())
            {
                lstBatesAndDpnFields.AddRange(lstDocFields);
                // Accumulates the per-document key/value pairs destined for the search index.
                ConstructIndexFields(productionDocument, lstDocFields, ref indexDocFields);
            }
        }
        if (lstBatesAndDpnFields.Any())
        {
            // Persist to the vault first; the index is only updated on success, keeping the
            // two stores consistent.
            var status = _documentVaultMngr.BulkAddOrUpdateDocumentFields(_mMatterId, _mCollectionId,
                lstBatesAndDpnFields);
            if (status)
            {
                var indexManagerProxy = new IndexManagerProxy(Int64.Parse(_mMatterId), _mCollectionId);
                var docs = new List<DocumentBeo>();
                if (indexDocFields.Any())
                {
                    docs.AddRange(from doc in indexDocFields
                                  let fields = doc.Value.ToDictionary(field => field.Key, field => field.Value)
                                  select new DocumentBeo { Id = doc.Key, Fields = fields });
                }
                // NOTE(review): fire-and-forget async call - indexing failures are not observed here.
                indexManagerProxy.BulkUpdateDocumentsAsync(docs);
                // Use the Count property rather than the Count() extension on the dictionary.
                Tracer.Trace("{0} fields calling the method UpdatesBatesFields", indexDocFields.Count);
            }
        }
    }
    Send(lstProductionDocuments);
}
/// <summary>
/// Initializes the worker: deserializes the near-duplication job parameters, validates them,
/// loads the dataset details and prepares the index manager proxy.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    _jobParameter = (NearDuplicationJobBEO)
        XmlUtility.DeserializeObject(BootParameters, typeof(NearDuplicationJobBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    _jobParameter.CollectionId.ShouldNotBeEmpty();
    #endregion

    // Resolve the dataset for the validated matter/dataset pair.
    var matterIdText = _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture);
    _dataset = GetDatasetDetails(_jobParameter.DatasetId, matterIdText);
    _dataset.ShouldNotBe(null);

    _indexManagerProxy = new IndexManagerProxy(_dataset.Matter.FolderID, _dataset.CollectionId);
    SetNearDuplicationFieldId();
}
/// <summary>
/// Worker start-up hook. Restores the near-duplication job definition from the boot
/// parameters, asserts it is well-formed, then wires up the dataset and index proxy.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    // Boot parameters carry the serialized job definition.
    _jobParameter =
        (NearDuplicationJobBEO)XmlUtility.DeserializeObject(BootParameters, typeof(NearDuplicationJobBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    _jobParameter.CollectionId.ShouldNotBeEmpty();
    #endregion

    _dataset = GetDatasetDetails(_jobParameter.DatasetId,
        _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture));
    _dataset.ShouldNotBe(null);

    _indexManagerProxy = new IndexManagerProxy(_dataset.Matter.FolderID, _dataset.CollectionId);
    SetNearDuplicationFieldId();
}
/// <summary>
/// Appends the reviewset and binder identifiers to the given documents in the search index.
/// </summary>
/// <param name="documentRecords">Reviewset details plus the documents to update.</param>
private void UpdateDocuments(DocumentRecordCollection documentRecords)
{
    documentRecords.ReviewsetDetails.ReviewSetId.ShouldNotBeEmpty();
    var indexManagerProxy = new IndexManagerProxy(documentRecords.ReviewsetDetails.MatterId,
        documentRecords.ReviewsetDetails.CollectionId);

    // Every document receives the same field/value pairs, so the dictionary is built once.
    var fields = new Dictionary<string, string>
    {
        { EVSystemFields.ReviewSetId, documentRecords.ReviewsetDetails.ReviewSetId },
        { EVSystemFields.BinderId, documentRecords.ReviewsetDetails.BinderId }
    };

    const int batchSize = 1000;
    // Fixed-size batches. Take() clamps the last (short) batch automatically, so the
    // original two-branch remainder arithmetic is unnecessary.
    for (var offset = 0; offset < documentRecords.Documents.Count; offset += batchSize)
    {
        var docs = documentRecords.Documents
            .Skip(offset)
            .Take(batchSize)
            .Select(doc => new DocumentBeo { Id = doc.DocumentId, Fields = fields })
            .ToList();
        indexManagerProxy.BulkAppendFields(docs);
    }
}
/// <summary>
/// Performs one deduplication task: either batch-deletes duplicate documents from the vault,
/// or stamps a document with its duplicate-group field and duplicate id in both the vault and
/// the search sub-system.
/// </summary>
/// <param name="doAtomicTask">Task payload; IsDelete selects deletion, IsGroup selects group stamping.</param>
/// <param name="jobParameters">Owning job details, used to build log identifiers.</param>
/// <returns>true when the task completed without a recorded failure; otherwise false.</returns>
protected override bool DoAtomicWork(DeduplicationJobTaskBEO doAtomicTask, DeduplicationJobBEO jobParameters)
{
    var isFailed = false;
    List<CollectionFieldEntity> lsColFieldEntity = null;
    try
    {
        if (doAtomicTask.IsDelete)
        {
            try
            {
                // Remove the duplicate documents from the vault in a single batch call.
                DocumentBO.BatchDelete(doAtomicTask.MatterId, doAtomicTask.CollectionId,
                    doAtomicTask.DeleteDocumentList);
                _successCount += doAtomicTask.DeleteDocumentList.Count;
            }
            catch (Exception ex)
            {
                // A delete failure fails the task and counts the whole batch as failed.
                isFailed = true;
                _failureCount += doAtomicTask.DeleteDocumentList.Count;
                EvLog.WriteEntry(
                    jobParameters.JobId + Constants.DO_ATOMIC_TASK + ":" +
                    Constants.DO_ATOMIC_ERR_DEL_DUP_DOC_VAULT, ex.Message);
                LogException(_jobid, ex,
                    Constants.DO_ATOMIC_TASK + ":" + Constants.DO_ATOMIC_ERR_DEL_DUP_DOC_VAULT,
                    LogCategory.Task, _task.TaskKey, ErrorCodes.ProblemInDoAtomicWork);
            }
        }
        else if (doAtomicTask.IsGroup)
        {
            try
            {
                // Look up the "Duplicate" collection field definition for the dataset.
                lsColFieldEntity = _vault.GetCollectionFields(DataSetCollectionId, EVSystemFields.Duplicate);
            }
            catch (Exception ex)
            {
                // Lookup failure marks the task failed, but processing still continues below
                // (lsColFieldEntity stays null, so the vault field insert is skipped).
                isFailed = true;
                EvLog.WriteEntry(
                    jobParameters.JobId + Constants.DO_ATOMIC_TASK + ":" +
                    Constants.DO_ATOMIC_ERR_GET_COL_FIELD, ex.Message);
                LogException(_jobid, ex,
                    Constants.DO_ATOMIC_TASK + ":" + Constants.DO_ATOMIC_ERR_GET_COL_FIELD,
                    LogCategory.Task, _task.TaskKey, ErrorCodes.ProblemInDoAtomicWork);
            }
            var document = _vault.GetDocumentMasterData(new Guid(doAtomicTask.CollectionId),
                doAtomicTask.DocumentReferenceId);

            #region "Insert Fields for Document in vault"
            if (lsColFieldEntity != null)
            {
                // NOTE(review): assumes GetCollectionFields returned at least one entry -
                // an empty list would throw here and be handled by the outer catch.
                var documentFieldEntity = new DocumentFieldEntity
                {
                    CreatedBy = document.CreatedBy,
                    DocumentReferenceId = document.DocumentReferenceId,
                    CollectionId = document.CollectionId,
                    FieldId = lsColFieldEntity[0].FieldId,
                    FieldValue = doAtomicTask.DuplicateField
                };
                try
                {
                    _vault.CreateDocumentField(documentFieldEntity);
                }
                catch (Exception ex)
                {
                    // Best-effort: a vault field insert failure is logged but does not fail the task.
                    EvLog.WriteEntry(jobParameters.JobId + Constants.DO_ATOMIC_TASK,
                        Constants.DO_ATOMIC_ERR_INS_DUP_FIELD_VAULT + ex.Message);
                }
            }
            #endregion

            document.CollectionId.ShouldNotBe(Guid.Empty);
            var rvwDocumentBeo = new RVWDocumentBEO
            {
                DocumentId = document.DocumentReferenceId
            };
            rvwDocumentBeo.CollectionId = document.CollectionId.ToString();
            rvwDocumentBeo.DuplicateId = doAtomicTask.HashValue;
            rvwDocumentBeo.MatterId = Convert.ToInt64(doAtomicTask.MatterId);
            // Persist the duplicate id on the document record.
            DocumentBO.UpdateDuplicateId(rvwDocumentBeo);

            #region "Insert Fields for Document into search sub-system"
            var fieldValues = new List<KeyValuePair<string, string>>
            {
                new KeyValuePair<string, string>(EVSystemFields.Duplicate.ToLower(),
                    doAtomicTask.DuplicateField),
                new KeyValuePair<string, string>(EVSystemFields.DuplicateId.ToLower(),
                    doAtomicTask.HashValue)
            };
            var indexManagerProxy = new IndexManagerProxy(Convert.ToInt64(doAtomicTask.MatterId),
                doAtomicTask.CollectionId);
            var documentBeos = new List<DocumentBeo>
            {
                DocumentBO.ToDocumentBeo(document.DocumentReferenceId, fieldValues)
            };
            // NOTE(review): fire-and-forget async update - indexing failures are not observed here.
            indexManagerProxy.BulkUpdateDocumentsAsync(documentBeos);
            #endregion
        }
    }
    catch (Exception exp)
    {
        // Any unanticipated failure in either branch fails the task.
        EvLog.WriteEntry(jobParameters.JobId + Constants.EVENT_DO_ATOMIC_WORK_EXCEPTION_VALUE, exp.Message,
            EventLogEntryType.Error);
        LogException(_jobid, exp, Constants.EVENT_DO_ATOMIC_WORK_EXCEPTION_VALUE, LogCategory.Task,
            _task.TaskKey, ErrorCodes.ProblemInDoAtomicWork);
        isFailed = true;
    }
    return (!isFailed);
}
/// <summary>
/// Updates the binder "not reviewed" tag field for the documents in the search sub-system.
/// </summary>
/// <param name="reviewsetRecord">Reviewset record carrying the document/tag associations to push.</param>
private void UpdateTag(DocumentRecordCollection reviewsetRecord)
{
    var indexManagerProxy = new IndexManagerProxy(reviewsetRecord.ReviewsetDetails.MatterId,
        reviewsetRecord.ReviewsetDetails.CollectionId);

    // Group the incoming tag records per document id.
    var documentList = reviewsetRecord.DocumentTags
        .GroupBy(item => item.DocumentId)
        .ToDictionary(group => group.Key, group => group.ToList());

    // Build one tag-field value per document. Only active tags (Status == 1) are included;
    // string.Join never returns null, so a document with no active tags gets string.Empty,
    // which clears any previous tag value in the index. (The original null/empty ternary
    // produced the identical value in both branches and has been removed.)
    var tagsList = new Dictionary<string, KeyValuePair<string, string>>();
    foreach (var document in documentList)
    {
        var strTags = string.Join(",", document.Value
            .Where(x => x.Status == 1)
            .Select(t => String.Format(EVSearchSyntax.TagValueFormat + "{0}", t.TagId)).ToArray());
        tagsList.Add(document.Key, new KeyValuePair<string, string>(EVSystemFields.Tag, strTags));
    }

    const int batchSize = 1000;
    // Push updates in fixed-size batches; Take() clamps the final short batch automatically,
    // and an empty tagsList never enters the loop, so the old empty-batch guard is unnecessary.
    for (var offset = 0; offset < tagsList.Count; offset += batchSize)
    {
        var docs = tagsList
            .Skip(offset)
            .Take(batchSize)
            .Select(doc => new DocumentBeo
            {
                Id = doc.Key,
                Fields = new Dictionary<string, string> { { doc.Value.Key, doc.Value.Value } }
            })
            .ToList();
        indexManagerProxy.BulkUpdateDocumentsAsync(docs);
    }
}
/// <summary>
/// Appends the reviewset and binder identifiers to the given documents in the search index.
/// </summary>
/// <param name="documentRecords">Reviewset details plus the documents to update.</param>
private void UpdateDocuments(DocumentRecordCollection documentRecords)
{
    documentRecords.ReviewsetDetails.ReviewSetId.ShouldNotBeEmpty();
    var indexManagerProxy = new IndexManagerProxy(documentRecords.ReviewsetDetails.MatterId,
        documentRecords.ReviewsetDetails.CollectionId);

    // Every document receives the same field/value pairs, so the dictionary is built once.
    var fields = new Dictionary<string, string>
    {
        { EVSystemFields.ReviewSetId, documentRecords.ReviewsetDetails.ReviewSetId },
        { EVSystemFields.BinderId, documentRecords.ReviewsetDetails.BinderId }
    };

    const int batchSize = 1000;
    // Fixed-size batches. Take() clamps the last (short) batch automatically, so the
    // original two-branch remainder arithmetic is unnecessary.
    for (var offset = 0; offset < documentRecords.Documents.Count; offset += batchSize)
    {
        var docs = documentRecords.Documents
            .Skip(offset)
            .Take(batchSize)
            .Select(doc => new DocumentBeo { Id = doc.DocumentId, Fields = fields })
            .ToList();
        indexManagerProxy.BulkAppendFields(docs);
    }
}
/// <summary>
/// Processes one pipeline work item: bulk-indexes new native documents, pushes overlay
/// updates into the search sub-system and records per-document index status in the database.
/// </summary>
/// <param name="message">Pipe message whose body is expected to be a <see cref="DocumentCollection"/>.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    var documentCollection = message.Body as DocumentCollection;
    if (documentCollection == null || documentCollection.documents == null)
    {
        Tracer.Warning("Indexing worker receives empty batch");
        return;
    }
    var documentErrorCollection = new DocumentErrorCollection();
    try
    {
        #region Assertion
        documentCollection.ShouldNotBe(null);
        documentCollection.dataset.ShouldNotBe(null);
        #endregion
        if (documentCollection.documents != null && documentCollection.documents.Any())
            //This check done to avoid null reference exception in MatterDAO.GetMatterDetails
        {
            var isDeleteTags = documentCollection.IsDeleteTagsForOverlay;
            // Cache the matter from the first batch; subsequent batches reuse it.
            if (null == _mMatter)
            {
                _mMatter = documentCollection.dataset.Matter;
            }
            // Initialize the instance of IndexBO
            _indexManagerProxy = new IndexManagerProxy(_mMatter.FolderID,
                documentCollection.dataset.CollectionId);
            using (new EVTransactionScope(TransactionScopeOption.Suppress))
            {
                #region Insert
                // Brand-new native documents are bulk-indexed; per-document errors are captured
                // in documentErrorCollection and forwarded via SendLog below.
                var nativeDocumentListInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && n.IsNewDocument);
                if (nativeDocumentListInsert.Any())
                {
                    //// Adding imageset documents.
                    AppendImagesetIdentifiers(ref nativeDocumentListInsert, documentCollection);
                    documentErrorCollection =
                        _indexManagerProxy.BulkIndexDocuments(nativeDocumentListInsert.ToDocumentList());
                }
                #endregion
                #region Update
                #region "Is Not Same Content File
                // Overlay documents whose content file changed: rebuild, then reassign
                // reviewset/imageset/tag identifiers before the bulk update.
                var nativeDocumentListUpdate = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListUpdate.Any())
                {
                    //TODO : Verify - delete and recreate is necessary, in case of overlay
                    //TODO: Search Engine Replacement - Search Sub System - Implement batch delete of documents from index
                    ConstructOverlayDocuments(documentCollection, nativeDocumentListUpdate,
                        documentCollection.IsDeleteTagsForOverlay);
                    //TODO: Search Engine Replacement - Search Sub System - Ensure appropriate annotations happens in search index
                    //TODO: Search Engine Replacement - Search Sub System - Ensure tags are updated for the documents in search index
                    AssignReviewsetIdentifiers(ref nativeDocumentListUpdate);
                    AssignImagesetIdentifiers(ref nativeDocumentListUpdate, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListUpdate, isDeleteTags);
                    //TODO: Search Engine Replacement - Search Sub System - Implement to ingest batch of documents into search index
                    _indexManagerProxy.BulkUpdateDocumentsAsync(nativeDocumentListUpdate.ToDocumentList());
                }
                #endregion
                #region Same Content File
                // Overlay documents with unchanged content: only tag/redactable fields and
                // identifiers need refreshing before the bulk update.
                var nativeDocumentListOverlayInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         !n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListOverlayInsert.Any())
                {
                    var imageDocumentList =
                        documentCollection.documents.FindAll(i => i.docType == DocumentsetType.ImageSet);
                    var imagesetId = string.Empty;
                    if (imageDocumentList.Any())
                    {
                        imagesetId = imageDocumentList.First().document.CollectionId.ToLower();
                    }
                    if (documentCollection.IsIncludeNativeFile || documentCollection.IsDeleteTagsForOverlay ||
                        !string.IsNullOrWhiteSpace(imagesetId))
                    {
                        foreach (var doc in nativeDocumentListOverlayInsert)
                        {
                            //Initializes the tag and redactable field values
                            ResetRedactableFields(documentCollection, imagesetId, doc);
                        }
                    }
                    AssignImagesetIdentifiers(ref nativeDocumentListOverlayInsert, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListOverlayInsert, isDeleteTags);
                    //bulk documents indexing.
                    _indexManagerProxy.BulkUpdateDocumentsAsync(
                        nativeDocumentListOverlayInsert.ToDocumentList());
                }
                #endregion
                #endregion
                #region "Update document Index status and contenet size in DB"
                var includedDocuments = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet).ToList();
                BulkUpsertDocumentContentSizeAndIndexStatusInfo(_mMatter.FolderID,
                    documentCollection.dataset.CollectionId, includedDocuments);
                #endregion
            }
            Send(documentCollection);
            SendLog(documentCollection, true, documentErrorCollection);
        }
    }
    catch (Exception ex)
    {
        // Failures are reported and logged; the batch is still counted as processed below.
        ex.AddDbgMsg("Problem in indexing the documents in search sub system");
        ex.Trace().Swallow();
        ReportToDirector(ex);
        SendLog(documentCollection, false);
    }
    IncreaseProcessedDocumentsCount(documentCollection.documents.Count);
    // Debug
    //Tracer.Warning("Indexing worker handled {0} documents", documentCollection.documents.Count);
}
/// <summary>
/// Processes one pipeline work item: bulk-indexes new native documents, pushes overlay
/// updates into the search sub-system and records per-document index status in the database.
/// </summary>
/// <param name="message">Pipe message whose body is expected to be a <see cref="DocumentCollection"/>.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    var documentCollection = message.Body as DocumentCollection;
    if (documentCollection == null || documentCollection.documents == null)
    {
        Tracer.Warning("Indexing worker receives empty batch");
        return;
    }
    var documentErrorCollection = new DocumentErrorCollection();
    try
    {
        #region Assertion
        documentCollection.ShouldNotBe(null);
        documentCollection.dataset.ShouldNotBe(null);
        #endregion
        if (documentCollection.documents != null && documentCollection.documents.Any())
            //This check done to avoid null reference exception in MatterDAO.GetMatterDetails
        {
            var isDeleteTags = documentCollection.IsDeleteTagsForOverlay;
            // Cache the matter from the first batch; subsequent batches reuse it.
            if (null == _mMatter)
            {
                _mMatter = documentCollection.dataset.Matter;
            }
            // Initialize the instance of IndexBO
            _indexManagerProxy = new IndexManagerProxy(_mMatter.FolderID,
                documentCollection.dataset.CollectionId);
            using (new EVTransactionScope(TransactionScopeOption.Suppress))
            {
                #region Insert
                // Brand-new native documents are bulk-indexed; per-document errors are captured
                // in documentErrorCollection and forwarded via SendLog below.
                var nativeDocumentListInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && n.IsNewDocument);
                if (nativeDocumentListInsert.Any())
                {
                    //// Adding imageset documents.
                    AppendImagesetIdentifiers(ref nativeDocumentListInsert, documentCollection);
                    documentErrorCollection =
                        _indexManagerProxy.BulkIndexDocuments(nativeDocumentListInsert.ToDocumentList());
                }
                #endregion
                #region Update
                #region "Is Not Same Content File
                // Overlay documents whose content file changed: rebuild, then reassign
                // reviewset/imageset/tag identifiers before the bulk update.
                var nativeDocumentListUpdate = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListUpdate.Any())
                {
                    //TODO : Verify - delete and recreate is necessary, in case of overlay
                    //TODO: Search Engine Replacement - Search Sub System - Implement batch delete of documents from index
                    ConstructOverlayDocuments(documentCollection, nativeDocumentListUpdate,
                        documentCollection.IsDeleteTagsForOverlay);
                    //TODO: Search Engine Replacement - Search Sub System - Ensure appropriate annotations happens in search index
                    //TODO: Search Engine Replacement - Search Sub System - Ensure tags are updated for the documents in search index
                    AssignReviewsetIdentifiers(ref nativeDocumentListUpdate);
                    AssignImagesetIdentifiers(ref nativeDocumentListUpdate, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListUpdate, isDeleteTags);
                    //TODO: Search Engine Replacement - Search Sub System - Implement to ingest batch of documents into search index
                    _indexManagerProxy.BulkUpdateDocumentsAsync(nativeDocumentListUpdate.ToDocumentList());
                }
                #endregion
                #region Same Content File
                // Overlay documents with unchanged content: only tag/redactable fields and
                // identifiers need refreshing before the bulk update.
                var nativeDocumentListOverlayInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         !n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListOverlayInsert.Any())
                {
                    var imageDocumentList =
                        documentCollection.documents.FindAll(i => i.docType == DocumentsetType.ImageSet);
                    var imagesetId = string.Empty;
                    if (imageDocumentList.Any())
                    {
                        imagesetId = imageDocumentList.First().document.CollectionId.ToLower();
                    }
                    if (documentCollection.IsIncludeNativeFile || documentCollection.IsDeleteTagsForOverlay ||
                        !string.IsNullOrWhiteSpace(imagesetId))
                    {
                        foreach (var doc in nativeDocumentListOverlayInsert)
                        {
                            //Initializes the tag and redactable field values
                            ResetRedactableFields(documentCollection, imagesetId, doc);
                        }
                    }
                    AssignImagesetIdentifiers(ref nativeDocumentListOverlayInsert, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListOverlayInsert, isDeleteTags);
                    //bulk documents indexing.
                    _indexManagerProxy.BulkUpdateDocumentsAsync(
                        nativeDocumentListOverlayInsert.ToDocumentList());
                }
                #endregion
                #endregion
                #region "Update document Index status and contenet size in DB"
                var includedDocuments = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet).ToList();
                BulkUpsertDocumentContentSizeAndIndexStatusInfo(_mMatter.FolderID,
                    documentCollection.dataset.CollectionId, includedDocuments);
                #endregion
            }
            Send(documentCollection);
            SendLog(documentCollection, true, documentErrorCollection);
        }
    }
    catch (Exception ex)
    {
        // Failures are reported and logged; the batch is still counted as processed below.
        ex.AddDbgMsg("Problem in indexing the documents in search sub system");
        ex.Trace().Swallow();
        ReportToDirector(ex);
        SendLog(documentCollection, false);
    }
    IncreaseProcessedDocumentsCount(documentCollection.documents.Count);
    // Debug
    //Tracer.Warning("Indexing worker handled {0} documents", documentCollection.documents.Count);
}
/// <summary>
/// Constructs the production (bates / DPN) fields for each document, persists them to the
/// document vault and, when the vault write succeeds, pushes the same values to the search index.
/// </summary>
/// <param name="lstProductionDocuments">Production documents to process; forwarded downstream unchanged.</param>
private void ProcessDocumentFields(List<ProductionDocumentDetail> lstProductionDocuments)
{
    if (lstProductionDocuments.Any())
    {
        var lstBatesAndDpnFields = new List<DocumentFieldsBEO>();
        var indexDocFields = new Dictionary<string, List<KeyValuePair<string, string>>>();
        foreach (var productionDocument in lstProductionDocuments)
        {
            if (_documentDetails.ContainsKey(productionDocument.DocumentId))
            {
                productionDocument.Fields = _documentDetails[productionDocument.DocumentId];
            }
            productionDocument.DocumentFields = _documentDetails;
            var lstDocFields = ConstructDocumentFields(productionDocument);
            if (lstDocFields != null && lstDocFields.Any())
            {
                lstBatesAndDpnFields.AddRange(lstDocFields);
                // Accumulates the per-document key/value pairs destined for the search index.
                ConstructIndexFields(productionDocument, lstDocFields, ref indexDocFields);
            }
        }
        if (lstBatesAndDpnFields.Any())
        {
            // Persist to the vault first; the index is only updated on success, keeping the
            // two stores consistent.
            var status = _documentVaultMngr.BulkAddOrUpdateDocumentFields(_mMatterId, _mCollectionId,
                lstBatesAndDpnFields);
            if (status)
            {
                var indexManagerProxy = new IndexManagerProxy(Int64.Parse(_mMatterId), _mCollectionId);
                var docs = new List<DocumentBeo>();
                if (indexDocFields.Any())
                {
                    docs.AddRange(from doc in indexDocFields
                                  let fields = doc.Value.ToDictionary(field => field.Key, field => field.Value)
                                  select new DocumentBeo { Id = doc.Key, Fields = fields });
                }
                // NOTE(review): fire-and-forget async call - indexing failures are not observed here.
                indexManagerProxy.BulkUpdateDocumentsAsync(docs);
                // Use the Count property rather than the Count() extension on the dictionary.
                Tracer.Trace("{0} fields calling the method UpdatesBatesFields", indexDocFields.Count);
            }
        }
    }
    Send(lstProductionDocuments);
}
/// <summary>
/// Unassigns the reviewset id and binder id for the list of documents in the search index.
/// </summary>
/// <param name="documents">Documents whose reviewset membership is being removed.</param>
/// <param name="reviewset">Reviewset whose identifiers are unassigned from the documents.</param>
private void UnAssignReviewsetInSearchIndex(List<ReviewsetDocumentBEO> documents, ReviewsetRecord reviewset)
{
    const int batchSize = 1000;

    // The same field/value pairs are removed from every document, so build the dictionary once.
    var fields = new Dictionary<string, string>
    {
        { EVSystemFields.ReviewSetId, reviewset.SplitReviewSetId },
        { EVSystemFields.BinderId, reviewset.BinderId }
    };

    var indexManagerProxy = new IndexManagerProxy(reviewset.MatterId, reviewset.CollectionId);

    // Process in fixed-size batches. Take() yields only the remaining elements past the end of
    // the list, so the final short batch needs no special-case arithmetic (the original
    // while-loop duplicated that bookkeeping).
    for (var offset = 0; offset < documents.Count; offset += batchSize)
    {
        var docs = documents
            .Skip(offset)
            .Take(batchSize)
            .Select(doc => new DocumentBeo { Id = doc.DocumentId, Fields = fields })
            .ToList();
        indexManagerProxy.BulkUnAssignFields(docs);
    }
}