/// <summary>
/// This is the overridden Shutdown() method. Records the job's audit summary
/// (job name, actual/replace strings) before the job infrastructure tears the job down.
/// </summary>
/// <param name="jobParameters">Input settings / parameters of the job.</param>
protected override void Shutdown(GlobalReplaceJobBEO jobParameters)
{
    try
    {
        // Trace that shutdown has started.
        LogMessage(Constants.ShutdownLogMessage, false, LogCategory.Job, null);

        // NOTE(review): return value is discarded — presumably called only to
        // validate that the boot parameters still deserialize; TODO confirm.
        GetGlobalReplaceBEO(jobParameters.BootParameters);

        // Populate the job summary/audit record with the find & replace details.
        JobLogInfo.CustomMessage = Constants.JobSummary;
        JobLogInfo.AddParameters(Constants.JobName, Constants.JOB_NAME);
        JobLogInfo.AddParameters(Constants.EV_AUDIT_ACTUAL_STRING, jobParameters.ActualString);
        JobLogInfo.AddParameters(Constants.EV_AUDIT_REPLACE_STRING, jobParameters.ReplaceString);
    }
    catch (EVException ex)
    {
        // Application-level exception: log the user-facing form, then record it
        // against the job run. Exceptions are swallowed — shutdown must not throw.
        EvLog.WriteEntry(Constants.JOB_NAME + Constants.ShutdownErrorMessage, ex.ToUserString(),
            EventLogEntryType.Error);
        LogException(jobParameters.JobId, ex, Constants.ShutdownErrorMessage, LogCategory.Job,
            string.Empty, ErrorCodes.ProblemInJobExecution);
    }
    catch (Exception ex)
    {
        // Unexpected exception: same handling, raw message instead of user string.
        EvLog.WriteEntry(Constants.JOB_NAME + Constants.ShutdownErrorMessage, ex.Message,
            EventLogEntryType.Error);
        LogException(jobParameters.JobId, ex, Constants.ShutdownErrorMessage, LogCategory.Job,
            string.Empty, ErrorCodes.ProblemInJobExecution);
    }
    finally
    {
        // Release cached state regardless of success/failure.
        _mMatter = null;
        _mOrginatorField = null;
    }
}
/// <summary>
/// Processes the work item: indexes a batch of documents into the search
/// sub-system. New documents are bulk-inserted; existing documents are updated
/// via one of two overlay paths depending on whether the content file changed.
/// Finally, index status / content size are upserted in the DB and the batch is
/// forwarded down the pipeline.
/// </summary>
/// <param name="message">The message; its Body is expected to be a <see cref="DocumentCollection"/>.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    var documentCollection = message.Body as DocumentCollection;
    // Guard: nothing to do for an empty/foreign payload.
    if (documentCollection == null || documentCollection.documents == null)
    {
        Tracer.Warning("Indexing worker receives empty batch");
        return;
    }
    // Collects per-document indexing errors from the bulk insert, reported via SendLog.
    var documentErrorCollection = new DocumentErrorCollection();
    try
    {
        #region Assertion
        documentCollection.ShouldNotBe(null);
        documentCollection.dataset.ShouldNotBe(null);
        #endregion
        // Empty-batch check avoids a null reference inside MatterDAO.GetMatterDetails.
        if (documentCollection.documents != null && documentCollection.documents.Any())
        {
            var isDeleteTags = documentCollection.IsDeleteTagsForOverlay;
            // Cache the matter across batches; first batch wins.
            if (null == _mMatter)
            {
                _mMatter = documentCollection.dataset.Matter;
            }
            // Initialize the instance of IndexBO for this matter/collection.
            _indexManagerProxy = new IndexManagerProxy(_mMatter.FolderID, documentCollection.dataset.CollectionId);
            // Suppress any ambient transaction while talking to the search index.
            using (new EVTransactionScope(TransactionScopeOption.Suppress))
            {
                #region Insert
                // Brand-new native documents: straight bulk insert.
                var nativeDocumentListInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && n.IsNewDocument);
                if (nativeDocumentListInsert.Any())
                {
                    // Adding imageset documents (identifiers) before indexing.
                    AppendImagesetIdentifiers(ref nativeDocumentListInsert, documentCollection);
                    // Synchronous bulk insert; errors captured for logging below.
                    documentErrorCollection = _indexManagerProxy.BulkIndexDocuments(nativeDocumentListInsert.ToDocumentList());
                }
                #endregion
                #region Update
                #region "Is Not Same Content File
                // Overlay path A: existing documents whose content file changed.
                var nativeDocumentListUpdate = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListUpdate.Any())
                {
                    //TODO : Verify - delete and recreate is necessary, in case of overlay
                    //TODO: Search Engine Replacement - Search Sub System - Implement batch delete of documents from index
                    ConstructOverlayDocuments(documentCollection, nativeDocumentListUpdate,
                        documentCollection.IsDeleteTagsForOverlay);
                    //TODO: Search Engine Replacement - Search Sub System - Ensure appropriate annotations happens in search index
                    //TODO: Search Engine Replacement - Search Sub System - Ensure tags are updated for the documents in search index
                    AssignReviewsetIdentifiers(ref nativeDocumentListUpdate);
                    AssignImagesetIdentifiers(ref nativeDocumentListUpdate, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListUpdate, isDeleteTags);
                    //TODO: Search Engine Replacement - Search Sub System - Implement to ingest batch of documents into search index
                    // NOTE(review): fire-and-forget async call — errors surface only via the proxy; TODO confirm intended.
                    _indexManagerProxy.BulkUpdateDocumentsAsync(nativeDocumentListUpdate.ToDocumentList());
                }
                #endregion
                #region Same Content File
                // Overlay path B: existing documents whose content file is unchanged.
                var nativeDocumentListOverlayInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         !n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListOverlayInsert.Any())
                {
                    var imageDocumentList = documentCollection.documents.FindAll(i => i.docType == DocumentsetType.ImageSet);
                    var imagesetId = string.Empty;
                    if (imageDocumentList.Any())
                    {
                        imagesetId = imageDocumentList.First().document.CollectionId.ToLower();
                    }
                    if (documentCollection.IsIncludeNativeFile || documentCollection.IsDeleteTagsForOverlay ||
                        !string.IsNullOrWhiteSpace(imagesetId))
                    {
                        foreach (var doc in nativeDocumentListOverlayInsert)
                        {
                            // Initializes the tag and redactable field values.
                            ResetRedactableFields(documentCollection, imagesetId, doc);
                        }
                    }
                    AssignImagesetIdentifiers(ref nativeDocumentListOverlayInsert, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListOverlayInsert, isDeleteTags);
                    // Bulk documents indexing (fire-and-forget, as above).
                    _indexManagerProxy.BulkUpdateDocumentsAsync(
                        nativeDocumentListOverlayInsert.ToDocumentList());
                }
                #endregion
                #endregion
                #region "Update document Index status and contenet size in DB"
                // All native documents in the batch get their index status / size persisted.
                var includedDocuments = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet).ToList();
                BulkUpsertDocumentContentSizeAndIndexStatusInfo(_mMatter.FolderID,
                    documentCollection.dataset.CollectionId, includedDocuments);
                #endregion
            }
            // Forward the batch and report success (with any per-document errors).
            Send(documentCollection);
            SendLog(documentCollection, true, documentErrorCollection);
        }
    }
    catch (Exception ex)
    {
        // Deliberate best-effort: annotate, trace, swallow, report failure upstream.
        ex.AddDbgMsg("Problem in indexing the documents in search sub system");
        ex.Trace().Swallow();
        ReportToDirector(ex);
        SendLog(documentCollection, false);
    }
    // Processed count includes failed batches — counted once per received document.
    IncreaseProcessedDocumentsCount(documentCollection.documents.Count);
    // Debug
    //Tracer.Warning("Indexing worker handled {0} documents", documentCollection.documents.Count);
}
/// <summary>
/// Processes the work item: indexes a batch of documents into the search
/// sub-system. Handles three cases — new documents (bulk insert), overlaid
/// documents with a new content file, and overlaid documents with unchanged
/// content — then persists index status / content size and forwards the batch.
/// </summary>
/// <param name="message">The message; its Body is expected to be a <see cref="DocumentCollection"/>.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    var documentCollection = message.Body as DocumentCollection;
    // Guard: nothing to do for an empty/foreign payload.
    if (documentCollection == null || documentCollection.documents == null)
    {
        Tracer.Warning("Indexing worker receives empty batch");
        return;
    }
    // Accumulates per-document errors from the bulk insert; passed to SendLog.
    var documentErrorCollection = new DocumentErrorCollection();
    try
    {
        #region Assertion
        documentCollection.ShouldNotBe(null);
        documentCollection.dataset.ShouldNotBe(null);
        #endregion
        // This check done to avoid null reference exception in MatterDAO.GetMatterDetails.
        if (documentCollection.documents != null && documentCollection.documents.Any())
        {
            var isDeleteTags = documentCollection.IsDeleteTagsForOverlay;
            // Matter is cached from the first batch seen by this worker.
            if (null == _mMatter)
            {
                _mMatter = documentCollection.dataset.Matter;
            }
            // Initialize the instance of IndexBO for this matter/collection.
            _indexManagerProxy = new IndexManagerProxy(_mMatter.FolderID, documentCollection.dataset.CollectionId);
            // Keep index operations outside any ambient transaction.
            using (new EVTransactionScope(TransactionScopeOption.Suppress))
            {
                #region Insert
                // Case 1: brand-new native documents.
                var nativeDocumentListInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && n.IsNewDocument);
                if (nativeDocumentListInsert.Any())
                {
                    // Adding imageset documents (identifiers) before indexing.
                    AppendImagesetIdentifiers(ref nativeDocumentListInsert, documentCollection);
                    documentErrorCollection = _indexManagerProxy.BulkIndexDocuments(nativeDocumentListInsert.ToDocumentList());
                }
                #endregion
                #region Update
                #region "Is Not Same Content File
                // Case 2: existing documents overlaid with a NEW content file.
                var nativeDocumentListUpdate = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListUpdate.Any())
                {
                    //TODO : Verify - delete and recreate is necessary, in case of overlay
                    //TODO: Search Engine Replacement - Search Sub System - Implement batch delete of documents from index
                    ConstructOverlayDocuments(documentCollection, nativeDocumentListUpdate,
                        documentCollection.IsDeleteTagsForOverlay);
                    //TODO: Search Engine Replacement - Search Sub System - Ensure appropriate annotations happens in search index
                    //TODO: Search Engine Replacement - Search Sub System - Ensure tags are updated for the documents in search index
                    AssignReviewsetIdentifiers(ref nativeDocumentListUpdate);
                    AssignImagesetIdentifiers(ref nativeDocumentListUpdate, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListUpdate, isDeleteTags);
                    //TODO: Search Engine Replacement - Search Sub System - Implement to ingest batch of documents into search index
                    // NOTE(review): async call is not awaited — verify errors are observed elsewhere.
                    _indexManagerProxy.BulkUpdateDocumentsAsync(nativeDocumentListUpdate.ToDocumentList());
                }
                #endregion
                #region Same Content File
                // Case 3: existing documents overlaid with the SAME content file.
                var nativeDocumentListOverlayInsert = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet && !n.IsNewDocument &&
                         !n.OverlayIsNewContentFile).ToList();
                if (nativeDocumentListOverlayInsert.Any())
                {
                    var imageDocumentList = documentCollection.documents.FindAll(i => i.docType == DocumentsetType.ImageSet);
                    var imagesetId = string.Empty;
                    if (imageDocumentList.Any())
                    {
                        imagesetId = imageDocumentList.First().document.CollectionId.ToLower();
                    }
                    if (documentCollection.IsIncludeNativeFile || documentCollection.IsDeleteTagsForOverlay ||
                        !string.IsNullOrWhiteSpace(imagesetId))
                    {
                        foreach (var doc in nativeDocumentListOverlayInsert)
                        {
                            // Initializes the tag and redactable field values.
                            ResetRedactableFields(documentCollection, imagesetId, doc);
                        }
                    }
                    AssignImagesetIdentifiers(ref nativeDocumentListOverlayInsert, documentCollection);
                    AssignTagIdentifiers(ref nativeDocumentListOverlayInsert, isDeleteTags);
                    // Bulk documents indexing (not awaited, as above).
                    _indexManagerProxy.BulkUpdateDocumentsAsync(
                        nativeDocumentListOverlayInsert.ToDocumentList());
                }
                #endregion
                #endregion
                #region "Update document Index status and contenet size in DB"
                // Persist index status and content size for every native document in the batch.
                var includedDocuments = documentCollection.documents.FindAll(
                    n => n.docType == DocumentsetType.NativeSet).ToList();
                BulkUpsertDocumentContentSizeAndIndexStatusInfo(_mMatter.FolderID,
                    documentCollection.dataset.CollectionId, includedDocuments);
                #endregion
            }
            // Hand the batch to the next pipeline stage and log success.
            Send(documentCollection);
            SendLog(documentCollection, true, documentErrorCollection);
        }
    }
    catch (Exception ex)
    {
        // Deliberate best-effort handling: annotate, trace, swallow, then report failure.
        ex.AddDbgMsg("Problem in indexing the documents in search sub system");
        ex.Trace().Swallow();
        ReportToDirector(ex);
        SendLog(documentCollection, false);
    }
    // Counted even on failure — one increment per document received.
    IncreaseProcessedDocumentsCount(documentCollection.documents.Count);
    // Debug
    //Tracer.Warning("Indexing worker handled {0} documents", documentCollection.documents.Count);
}
/// <summary>
/// This is the overridden Initialize() method. Builds the job's BEO from the
/// boot parameters, configures audit/commit settings, reads the task batch
/// size from configuration, and resolves the matter for the search context.
/// </summary>
/// <param name="jobId">Job Identifier.</param>
/// <param name="jobRunId">Job Run Identifier.</param>
/// <param name="bootParameters">Boot Parameters (serialized GlobalReplaceBEO).</param>
/// <param name="createdBy">User who scheduled the job.</param>
/// <returns>Initialized <see cref="GlobalReplaceJobBEO"/>, or null if initialization failed.</returns>
protected override GlobalReplaceJobBEO Initialize(int jobId, int jobRunId, string bootParameters,
    string createdBy)
{
    GlobalReplaceJobBEO jobBeo = null;
    try
    {
        LogMessage(Constants.InitializationStartMessage, false, LogCategory.Job, null);
        LogMessage(Constants.InitializationStartMessage, GetType(),
            "LexisNexis.Evolution.BatchJobs.FindandReplaceJob.Initialize", EventLogEntryType.Information,
            jobId, jobRunId);

        // Initialize the JobBEO with identity and broker/commit defaults.
        jobBeo = new GlobalReplaceJobBEO
        {
            JobId = jobId,
            JobRunId = jobRunId,
            JobScheduleCreatedBy = createdBy,
            JobTypeName = Constants.Job_TYPE_NAME,
            BootParameters = bootParameters,
            JobName = Constants.JOB_NAME,
            StatusBrokerType = BrokerType.Database,
            CommitIntervalBrokerType = BrokerType.ConfigFile,
            CommitIntervalSettingType = SettingType.CommonSetting
        };

        // Construct GlobalReplaceBEO from the boot parameter by deserializing it.
        GlobalReplaceBEO globalReplaceContextBeo = GetGlobalReplaceBEO(bootParameters);
        globalReplaceContextBeo.CreatedBy = createdBy;

        // Set output batch size from application configuration.
        _mTaskBatchSize = Convert.ToInt16(ApplicationConfigurationManager.GetValue(Constants.ResultsPageSize));

        EvLog.WriteEntry(jobId + Constants.AUDIT_BOOT_PARAMETER_KEY, Constants.AUDIT_BOOT_PARAMETER_VALUE,
            EventLogEntryType.Information);

        // Copy the find & replace context onto the job BEO.
        jobBeo.SearchContext = globalReplaceContextBeo.SearchContext;
        jobBeo.ActualString = globalReplaceContextBeo.ActualString;
        jobBeo.ReplaceString = globalReplaceContextBeo.ReplaceString;

        // Originator field marks documents touched by this job run with a unique id.
        _mOrginatorField = new RVWDocumentFieldBEO
        {
            FieldName = Constants.OrginatorFieldName,
            FieldValue = Guid.NewGuid().ToString(),
            FieldId = -1
        };
        _mMatter =
            MatterDAO.GetMatterDetails(globalReplaceContextBeo.SearchContext.MatterId.ToString());
    }
    catch (EVException ex)
    {
        EvLog.WriteEntry(jobId + " - " + Constants.InitializationFailMessage, ex.ToUserString(),
            EventLogEntryType.Error);
        LogException(jobId, ex, Constants.InitializationFailMessage, LogCategory.Job, string.Empty,
            ErrorCodes.ProblemInJobInitialization);
    }
    catch (Exception exp)
    {
        // Fixed: the event source previously embedded the job id twice
        // (jobId + " - " + jobId) and logged the failure at Information
        // severity; now mirrors the EVException branch (fail message as
        // source, Error severity).
        EvLog.WriteEntry(jobId + " - " + Constants.InitializationFailMessage,
            Constants.InitializationFailMessage + ":" + exp.Message, EventLogEntryType.Error);
        LogException(jobId, exp, Constants.InitializationFailMessage, LogCategory.Job, string.Empty,
            ErrorCodes.ProblemInJobInitialization);
    }
    // Null when initialization failed; caller is expected to handle that.
    return (jobBeo);
}