/// <summary>
/// Initializes a new instance holding the context needed to overlay documents.
/// </summary>
/// <param name="jobParameter">Import job configuration.</param>
/// <param name="dataset">Target dataset details.</param>
/// <param name="jobRunId">Identifier of the current job run.</param>
/// <param name="workerInstanceId">Identifier of the worker instance.</param>
public OverlayDocumentManager(ImportBEO jobParameter, DatasetBEO dataset, string jobRunId, string workerInstanceId)
{
    m_WorkerInstanceId = workerInstanceId;
    m_JobRunId = jobRunId;
    m_Dataset = dataset;
    m_JobParameter = jobParameter;
}
/// <summary>
/// Processes the work item: validates the incoming document collection and,
/// for overlay jobs, forwards the documents for processing.
/// </summary>
/// <param name="message">The pipe message whose body is a DocumentCollection.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    try
    {
        var recordParserResponse = (DocumentCollection)message.Body;

        // Guard against a null body BEFORE dereferencing it. The original code
        // asserted on recordParserResponse.documents first, which would throw a
        // NullReferenceException on a null body and made the later null check
        // unreachable dead code.
        if (recordParserResponse == null)
        {
            return;
        }

        #region Assertion
        recordParserResponse.documents.ShouldNotBe(null);
        #endregion

        // Overlay workers must never be wired into an append pipeline.
        if (_jobParameter.IsAppend)
        {
            throw new InvalidOperationException(Constants.ErrorMessageInvalidPipeLine);
        }

        if (recordParserResponse.dataset != null)
        {
            _dataset = recordParserResponse.dataset;
        }

        if (recordParserResponse.documents != null && recordParserResponse.documents.Any())
        {
            ProcessDocuments(recordParserResponse);
        }
    }
    catch (Exception ex)
    {
        ReportToDirector(ex);
        ex.Trace().Swallow();
    }
}
/// <summary>
/// Creates a manager that builds document details for a LAW import job.
/// </summary>
/// <param name="jobParams">LAW import job configuration.</param>
/// <param name="jobRunId">Identifier of the current job run.</param>
/// <param name="workerInstanceId">Identifier of the worker instance.</param>
/// <param name="datasetDetails">Target dataset details.</param>
public LawDocumentManager(LawImportBEO jobParams, string jobRunId, string workerInstanceId, DatasetBEO datasetDetails)
{
    _datasetDetails = datasetDetails;
    _workerInstanceId = workerInstanceId;
    _jobRunId = jobRunId;
    _jobParams = jobParams;
}
/// <summary>
/// Initializes the worker: loads the export job definition, resolves the
/// dataset/matter details, and (when family groups are recreated) computes the
/// shared begin/end numbers exactly once across the pipeline.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    _exportLoadJobDetailBeo = Utils.SmartXmlDeserializer(BootParameters) as ExportLoadJobDetailBEO;
    Debug.Assert(_exportLoadJobDetailBeo != null, "exportLoadJobDetailBEO != null");

    _dataset = DataSetBO.GetDatasetDetailsWithMatterInfo(
        Convert.ToInt64(_exportLoadJobDetailBeo.DatasetId), _exportLoadJobDetailBeo.MatterId);
    matterId = _dataset.Matter.FolderID;

    if (!_exportLoadJobDetailBeo.RecreateFamilyGroup)
    {
        return;
    }

    // Shared pipeline property guarantees begins/ends are computed only once.
    PipelineProperty pipelineProperty = GetPipelineSharedProperty(BeginsEndsPropertyName);
    lock (pipelineProperty)
    {
        if (pipelineProperty.Value == null)
        {
            // Fall back to the begin-number field when no end-number field is configured.
            string endNumber = string.IsNullOrEmpty(_exportLoadJobDetailBeo.FamilyEndNumberField)
                ? _exportLoadJobDetailBeo.FamilyBegNumberField
                : _exportLoadJobDetailBeo.FamilyEndNumberField;
            pipelineProperty.Value = CalculateBeginsAndEnds(_exportLoadJobDetailBeo.FamilyBegNumberField, endNumber);
        }
        _beginsEnds = pipelineProperty.Value as Dictionary<string, Tuple<string, string>>;
    }
}
/// <summary>
/// Reprocess startup: restores the original LawSync job parameters from the
/// stored job configuration, caches DCN/LAW field details, and prepares the
/// image volume when images are produced.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    _reprocessJobParameter = (ConversionReprocessJobBeo)XmlUtility.DeserializeObject(
        BootParameters, typeof(ConversionReprocessJobBeo));

    // The reprocess job only carries the original job id; pull the original
    // LawSync parameters from that job's stored configuration.
    var baseConfig = ReconversionDAO.GetJobConfigInfo(Convert.ToInt32(_reprocessJobParameter.OrginialJobId));
    _lawSyncJobParameter = (LawSyncBEO)XmlUtility.DeserializeObject(baseConfig.BootParameters, typeof(LawSyncBEO));

    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_lawSyncJobParameter.DatasetId);

    var dcnField = _dataset.DatasetFieldList.FirstOrDefault(f => f.FieldType.DataTypeId == Constants.DCNFieldTypeId);
    if (dcnField != null)
    {
        _dcnFieldName = dcnField.Name;
    }

    var lawDocumentIdField = _dataset.DatasetFieldList.FirstOrDefault(f => f.Name == EVSystemFields.LawDocumentId);
    if (lawDocumentIdField != null)
    {
        _lawFieldId = lawDocumentIdField.ID;
    }

    if (_lawSyncJobParameter.IsProduceImage)
    {
        _jobEVImagesDirectory = LawVolumeHelper.GetJobImageFolder(WorkAssignment.JobId, _lawSyncJobParameter.LawCaseId);
        _volumeCount++;
        _volumeFolderName = LawVolumeHelper.CreateVolumeFolder(_jobEVImagesDirectory, _volumeCount);
    }

    _vaultManager = EVUnityContainer.Resolve<IDocumentVaultManager>(Constants.DocumentVaultManager);
}
/// <summary>
/// Initializes the export worker from the boot parameters; when family groups
/// must be recreated, lazily computes the begin/end map shared by the pipeline.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    _exportLoadJobDetailBeo = Utils.SmartXmlDeserializer(BootParameters) as ExportLoadJobDetailBEO;
    Debug.Assert(_exportLoadJobDetailBeo != null, "exportLoadJobDetailBEO != null");

    var datasetId = Convert.ToInt64(_exportLoadJobDetailBeo.DatasetId);
    _dataset = DataSetBO.GetDatasetDetailsWithMatterInfo(datasetId, _exportLoadJobDetailBeo.MatterId);
    matterId = _dataset.Matter.FolderID;

    if (_exportLoadJobDetailBeo.RecreateFamilyGroup)
    {
        var sharedProperty = GetPipelineSharedProperty(BeginsEndsPropertyName);
        lock (sharedProperty)
        {
            if (sharedProperty.Value == null)
            {
                var begin = _exportLoadJobDetailBeo.FamilyBegNumberField;
                // Use the begin-number field when the end-number field is absent.
                var end = !string.IsNullOrEmpty(_exportLoadJobDetailBeo.FamilyEndNumberField)
                    ? _exportLoadJobDetailBeo.FamilyEndNumberField
                    : begin;
                sharedProperty.Value = CalculateBeginsAndEnds(begin, end);
            }
            _beginsEnds = sharedProperty.Value as Dictionary<string, Tuple<string, string>>;
        }
    }
}
/// <summary>
/// Populates matter, dataset, field and search-server details on the
/// production document detail from the original collection id.
/// </summary>
private static void GetMatterDatasetDetails(ProductionDocumentDetail m_ProductionDocumentDetail, ProductionDetailsBEO m_BootParameters)
{
    var dataset = DataSetBO.GetDataSetDetailForCollectionId(m_BootParameters.OriginalCollectionId);

    // The redactable set id becomes the default collection id; the native set
    // keeps the original collection id.
    m_ProductionDocumentDetail.OriginalCollectionId = dataset.RedactableDocumentSetId;
    m_ProductionDocumentDetail.DatasetCollectionId = m_BootParameters.OriginalCollectionId;
    m_ProductionDocumentDetail.OriginalDatasetName = dataset.FolderName;
    m_ProductionDocumentDetail.OriginalDatasetId = (int)dataset.FolderID;

    long originalDatasetId = Convert.ToInt64(m_ProductionDocumentDetail.OriginalDatasetId);
    m_ProductionDocumentDetail.lstProductionFields =
        DataSetBO.GetDataSetFields(originalDatasetId, m_ProductionDocumentDetail.ProductionCollectionId);
    m_ProductionDocumentDetail.dataSetBeo = DataSetBO.GetDataSetDetailForDataSetId(originalDatasetId);
    m_ProductionDocumentDetail.lstDsFieldsBeo =
        DataSetBO.GetDataSetFields(originalDatasetId, m_ProductionDocumentDetail.DatasetCollectionId);
    m_ProductionDocumentDetail.matterBeo =
        MatterBO.GetMatterDetails(m_ProductionDocumentDetail.dataSetBeo.ParentID.ToString(CultureInfo.InvariantCulture));
    m_ProductionDocumentDetail.SearchServerDetails =
        ServerBO.GetSearchServer(m_ProductionDocumentDetail.matterBeo.SearchServer.Id);
}
/// <summary>
/// Processes the work item: captures dataset context from the first record
/// batch, remembers the thread string, and parses the record text.
/// </summary>
/// <param name="message">The message.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    try
    {
        _parserResponse = (LoadFileRecordCollection)message.Body;

        #region Dataset Detaills
        // Dataset details arrive with the first batch; cache them once.
        if (_dataset == null)
        {
            _dataset = _parserResponse.dataset;
            if (_dataset != null && !string.IsNullOrEmpty(_dataset.CompressedFileExtractionLocation))
            {
                _datasetPath = _dataset.CompressedFileExtractionLocation;
            }
        }
        if (string.IsNullOrEmpty(_uniqueThreadString))
        {
            _uniqueThreadString = _parserResponse.UniqueThreadString;
        }
        #endregion

        ParseRecordText();
    }
    catch (Exception ex)
    {
        ReportToDirector(ex);
        ex.Trace().Swallow();
        LogMessage(false, ex.ToUserString());
    }
}
/// <summary>
/// Initializes the reprocess worker from the original LawSync job's stored
/// configuration and prepares field info, image volume and the vault manager.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _reprocessJobParameter = (ConversionReprocessJobBeo)XmlUtility.DeserializeObject(BootParameters, typeof(ConversionReprocessJobBeo));
    var originalJobConfig = ReconversionDAO.GetJobConfigInfo(Convert.ToInt32(_reprocessJobParameter.OrginialJobId));
    _lawSyncJobParameter = (LawSyncBEO)XmlUtility.DeserializeObject(originalJobConfig.BootParameters, typeof(LawSyncBEO));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_lawSyncJobParameter.DatasetId);

    // Cache the DCN field name and the LAW document-id field id, when present.
    var dcn = _dataset.DatasetFieldList.FirstOrDefault(f => f.FieldType.DataTypeId == Constants.DCNFieldTypeId);
    if (dcn != null)
    {
        _dcnFieldName = dcn.Name;
    }
    var lawId = _dataset.DatasetFieldList.FirstOrDefault(f => f.Name == EVSystemFields.LawDocumentId);
    if (lawId != null)
    {
        _lawFieldId = lawId.ID;
    }

    // Image production requires a volume folder under the job image directory.
    if (_lawSyncJobParameter.IsProduceImage)
    {
        _jobEVImagesDirectory = LawVolumeHelper.GetJobImageFolder(WorkAssignment.JobId, _lawSyncJobParameter.LawCaseId);
        _volumeCount++;
        _volumeFolderName = LawVolumeHelper.CreateVolumeFolder(_jobEVImagesDirectory, _volumeCount);
    }

    _vaultManager = EVUnityContainer.Resolve<IDocumentVaultManager>(Constants.DocumentVaultManager);
}
/// <summary>
/// Creates a manager that overlays LAW documents onto an existing dataset and
/// tracks new-to-old document id pairs encountered during the overlay.
/// </summary>
public LawOverlayDocumentManager(LawImportBEO jobParams, DatasetBEO datasetBEO, string jobRunId, string workerInstanceId)
{
    _overlayNewAndOldDocumentIdPairs = new Dictionary<string, string>();
    _workerInstanceId = workerInstanceId;
    _jobRunId = jobRunId;
    _datasetBEO = datasetBEO;
    _jobParams = jobParams;
}
/// <summary>
/// Worker teardown: clears per-run references so the worker instance holds no
/// job, dataset or field state after the pipeline completes.
/// </summary>
protected override void EndWork()
{
    base.EndWork();
    _jobParameter = null;
    _dataset = null;
    _documentsFields = null;
    _documentsFieldsForSearchEngineUpdate = null;
}
/// <summary>
/// Initializes the worker from the analytics project boot parameters and
/// loads the target dataset.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    var datasetId = Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
}
/// <summary>
/// Initializes the export worker: reads the configured parallelism, loads the
/// export job definition, and resolves dataset details with matter info.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _maxParallelThread = Convert.ToInt32(ApplicationConfigurationManager.GetValue("NumberOfMaxParallelism", "Export"));
    _exportLoadJobDetailBeo = Utils.SmartXmlDeserializer(BootParameters) as ExportLoadJobDetailBEO;
    var datasetId = Convert.ToInt64(_exportLoadJobDetailBeo.DatasetId);
    _dataset = DataSetBO.GetDatasetDetailsWithMatterInfo(datasetId, _exportLoadJobDetailBeo.MatterId);
}
/// <summary>
/// Initializes the worker: caches the raw boot parameter, deserializes the
/// export job definition, and loads dataset details with matter info.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _bootParameter = BootParameters;
    _exportLoadJobDetailBeo = Utils.SmartXmlDeserializer(_bootParameter) as ExportLoadJobDetailBEO;
    _dataset = DataSetBO.GetDatasetDetailsWithMatterInfo(
        Convert.ToInt64(_exportLoadJobDetailBeo.DatasetId), _exportLoadJobDetailBeo.MatterId);
}
/// <summary>
/// Processes the work item: builds document details for each LAW document,
/// logs missing-content issues, and routes the batch for append or overlay.
/// </summary>
/// <param name="message">The message.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    var documentCollection = (LawDocumentCollection)message.Body;
    documentCollection.ShouldNotBe(null);
    documentCollection.Documents.ShouldNotBe(null);
    _dataset = documentCollection.Dataset;
    try
    {
        var detailList = new List<DocumentDetail>();
        var workerLogs = new List<JobWorkerLog<LawImportLogInfo>>();
        var manager = new LawDocumentManager(_jobParams, PipelineId, WorkerId, _dataset);
        foreach (var lawDocument in documentCollection.Documents)
        {
            JobWorkerLog<LawImportLogInfo> workerLog;
            var details = manager.GetDocuments(
                lawDocument.LawDocumentId.ToString(CultureInfo.InvariantCulture),
                lawDocument.DocumentControlNumber, lawDocument, out workerLog);
            if (details != null)
            {
                detailList.AddRange(details);
            }
            if (workerLog != null)
            {
                workerLogs.Add(workerLog);
            }
        }

        // Log messages for missing native, missing images and missing text.
        if (workerLogs.Any())
        {
            SendLog(workerLogs);
        }

        if (_jobParams.ImportOptions == ImportOptionsBEO.AppendNew)
        {
            Send(detailList);
            SendThreads(detailList);
            SendFamilies(detailList);
            return;
        }

        // Overlay scenario.
        if (detailList.Any())
        {
            ProcessDocuments(detailList);
            SendThreads(detailList);
            SendFamilies(detailList);
        }
    }
    catch (Exception ex)
    {
        ReportToDirector(ex.ToUserString());
        ex.Trace().Swallow();
        LogErrorMessage(documentCollection.Documents, false, ProcessFailedMessage);
    }
}
/// <summary>
/// Initializes the worker: loads analytics job parameters and the dataset,
/// points the document source at the dataset collection, and marks the
/// commit-index status as initialized.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    var datasetId = Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    SetCommiyIndexStatusToInitialized(_jobParameter.MatterId);
}
/// <summary>
/// Near-duplication startup: validates job parameters, resolves the vault
/// connection and dataset details, and initializes the Polaris engine.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _jobParameter = (NearDuplicationJobBEO)XmlUtility.DeserializeObject(BootParameters, typeof(NearDuplicationJobBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    _jobParameter.CollectionId.ShouldNotBeEmpty();
    #endregion

    _ingestionId = string.Format("{0}_Dataset-{1}_RunId-{2}", _jobParameter.JobName, _jobParameter.DatasetId, PipelineId);

    // Fall back to configured defaults when thresholds are not supplied.
    var familyThreshold = _jobParameter.FamilyThreshold > 0
        ? (byte)_jobParameter.FamilyThreshold
        : Constants.NearDuplicationFamilyThresholdDefaultValue;
    var clusterThreshold = _jobParameter.ClusterThreshold > 0
        ? (byte)_jobParameter.ClusterThreshold
        : Constants.NearDuplicationClusterThresholdDefaultValue;

    // Vault database connection string for the matter.
    var vaultManager = new DocumentVaultManager();
    _connectionString = vaultManager.GetConnectionStringForMatter(_jobParameter.MatterId);

    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);
    _dataset.ShouldNotBe(null);

    // Polaris license server IP address.
    var licenseServer = CmgServiceConfigBO.GetServiceConfigurationsforConfig(Constants.NearDuplicationPolarisLicenseKeyName);

    try
    {
        _nearDuplicationAdapter = new EVPolarisNearDuplicationAdapter();
        _nearDuplicationAdapter.InitializeAdapter(_connectionString, _ingestionId, licenseServer, clusterThreshold, familyThreshold);
    }
    catch (Exception ex)
    {
        Tracer.Error(
            "Near Duplication Start Up Worker: Failed to initialize Near duplication engine for job run id:{0}, exception:{1}",
            PipelineId, ex);
        throw;
    }
}
/// <summary>
/// Startup for the near-duplication worker: asserts the job parameters, builds
/// the ingestion id, resolves connection/dataset/license details, and brings
/// up the Polaris near-duplication adapter.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _jobParameter = (NearDuplicationJobBEO)XmlUtility.DeserializeObject(BootParameters, typeof(NearDuplicationJobBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    _jobParameter.CollectionId.ShouldNotBeEmpty();
    #endregion

    _ingestionId = $"{_jobParameter.JobName}_Dataset-{_jobParameter.DatasetId}_RunId-{PipelineId}";

    // Thresholds default to the configured constants when not provided.
    var familyCutoff = _jobParameter.FamilyThreshold > 0
        ? (byte)_jobParameter.FamilyThreshold
        : Constants.NearDuplicationFamilyThresholdDefaultValue;
    var clusterCutoff = _jobParameter.ClusterThreshold > 0
        ? (byte)_jobParameter.ClusterThreshold
        : Constants.NearDuplicationClusterThresholdDefaultValue;

    // Resolve the matter's vault connection string.
    _connectionString = new DocumentVaultManager().GetConnectionStringForMatter(_jobParameter.MatterId);

    // Dataset details.
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);
    _dataset.ShouldNotBe(null);

    // Polaris license server IP address.
    var polarisLicenseServer = CmgServiceConfigBO.GetServiceConfigurationsforConfig(Constants.NearDuplicationPolarisLicenseKeyName);

    try
    {
        _nearDuplicationAdapter = new EVPolarisNearDuplicationAdapter();
        _nearDuplicationAdapter.InitializeAdapter(_connectionString, _ingestionId, polarisLicenseServer, clusterCutoff, familyCutoff);
    }
    catch (Exception ex)
    {
        Tracer.Error(
            "Near Duplication Start Up Worker: Failed to initialize Near duplication engine for job run id:{0}, exception:{1}",
            PipelineId, ex);
        throw;
    }
}
/// <summary>
/// Begins the work.
/// Loads the import profile, resolves dataset, matter and search-server
/// details, and prepares the compound-file processor.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    try
    {
        m_Parameters = DocumentImportHelper.GetProfileBeo((string)BootParameters);
        m_CounterForCorrelationId = 0;
        InitializeConfigurationItems();
        //?? need to percentage completion
        m_PercenatgeCompletion = 100;
        m_Documents = new List<RVWDocumentBEO>();

        #region Get Dataset Details
        if (m_Parameters != null && m_Parameters.DatasetDetails.FolderID > 0)
        {
            m_FileProcessor = FileProcessorFactory.CreateFileProcessor(
                FileProcessorFactory.ExtractionChoices.CompoundFileExtraction);
            m_Dataset = DataSetBO.GetDataSetDetailForDataSetId(m_Parameters.DatasetDetails.FolderID);
            // Enrich the dataset with full matter and search-server details when they resolve.
            if (m_Dataset.Matter != null && m_Dataset.Matter.FolderID > 0)
            {
                var matterDetails = MatterDAO.GetMatterDetails(m_Dataset.Matter.FolderID.ToString());
                if (matterDetails != null)
                {
                    m_Dataset.Matter = matterDetails;
                    var searchServerDetails = ServerDAO.GetSearchServer(matterDetails.SearchServer.Id);
                    if (searchServerDetails != null)
                    {
                        m_Dataset.Matter.SearchServer = searchServerDetails;
                    }
                }
            }
            else
            {
                throw new EVException().AddErrorCode(ErrorCodes.EDLoaderExtractionWorker_FailedToObtainMatterDetails);
                //?? need to set message in resource file
            }
        }
        else
        {
            throw new EVException().AddErrorCode(ErrorCodes.EDLoaderExtractionWorker_ObtainDatasetDetailsFailure);
            //?? need to set message in resource file
        }
        #endregion
    }
    catch (Exception ex)
    {
        // NOTE(review): this catch swallows ALL startup failures — including the
        // EVExceptions thrown above — so the worker continues with partially
        // initialized state. Looks deliberate (best-effort startup), but the
        // thrown error codes are never surfaced; confirm this is intended.
        ex.Trace().Swallow();
    }
}
/// <summary>
/// Processes the work item: resolves job parameters (reprocess-aware), sets
/// metadata for each LAW document, records document process state, and
/// forwards the collection down the pipeline.
/// </summary>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    try
    {
        if (message.Body == null)
        {
            return;
        }
        var lawDocumentsList = (LawSyncDocumentCollection)message.Body;

        // Reprocess jobs carry the original parameters in the message itself.
        if (_jobParameter == null)
        {
            _jobParameter = lawDocumentsList.IsLawSyncReprocessJob
                ? lawDocumentsList.OrginalJobParameter
                : (LawSyncBEO)XmlUtility.DeserializeObject(BootParameters, typeof(LawSyncBEO));
        }

        _datasetCollectionId = lawDocumentsList.DatasetCollectionId;
        _lawSyncJobId = lawDocumentsList.LawSynJobId;
        _logInfoList = new List<JobWorkerLog<LawSyncLogInfo>>();
        _documentProcessStateList = new List<DocumentConversionLogBeo>();
        _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);

        if (lawDocumentsList.Documents.Any())
        {
            foreach (var lawDocument in lawDocumentsList.Documents)
            {
                SetMetadataForDcoument(lawDocument);
            }
        }

        if (_documentProcessStateList.Any())
        {
            UpdateDcoumentProcessState(_documentProcessStateList);
        }

        Send(lawDocumentsList);

        if (_logInfoList.Any())
        {
            SendLogPipe(_logInfoList);
        }
    }
    catch (Exception ex)
    {
        ReportToDirector(ex);
        ex.Trace().Swallow();
        LogMessage(Constants.LawSyncFailureinGetMetadataMessage + ex.ToUserString());
    }
}
/// <summary>
/// To set field id for created fields: resolves each LAW field's mapped
/// dataset field and copies its id onto the mapping.
/// </summary>
/// <param name="fields">LAW fields whose mapped dataset field ids are resolved.</param>
private void SetFieldIdForCreatedFields(List<LawFieldBEO> fields)
{
    _datasetDetails = DataSetBO.GetDataSetDetailForDataSetId(_jobParams.FolderId);
    foreach (var lawField in fields)
    {
        var mappedField = _datasetDetails.DatasetFieldList.Find(f => f.Name.Equals(lawField.MappingFieldName));
        if (mappedField == null)
        {
            continue;
        }
        lawField.MappingFieldId = mappedField.ID.ToString(CultureInfo.InvariantCulture);
    }
    _selectedFields = fields;
}
/// <summary>
/// Categorization startup: loads job parameters, dataset and project info,
/// and marks the commit-index status as initialized.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (CategorizeInfo)XmlUtility.DeserializeObject(BootParameters, typeof(CategorizeInfo));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);
    var matterId = _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture);
    var datasetId = _jobParameter.DatasetId.ToString(CultureInfo.InvariantCulture);
    var projectId = _jobParameter.ProjectId.ToString(CultureInfo.InvariantCulture);
    _projectInfo = AnalyticsProject.Get(matterId, datasetId, projectId);
    SetCommiyIndexStatusToInitialized(_jobParameter.MatterId);
}
/// <summary>
/// Processes the work item for LAW sync metadata: initializes job parameters
/// on first use (honoring reprocess jobs), applies metadata to each document,
/// persists document process state, and forwards the collection.
/// </summary>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    try
    {
        if (message.Body == null)
        {
            return;
        }
        var syncDocuments = (LawSyncDocumentCollection)message.Body;

        if (_jobParameter == null)
        {
            if (syncDocuments.IsLawSyncReprocessJob)
            {
                // Reprocess runs carry the original job parameters along.
                _jobParameter = syncDocuments.OrginalJobParameter;
            }
            else
            {
                _jobParameter = (LawSyncBEO)XmlUtility.DeserializeObject(BootParameters, typeof(LawSyncBEO));
            }
        }

        _datasetCollectionId = syncDocuments.DatasetCollectionId;
        _lawSyncJobId = syncDocuments.LawSynJobId;
        _logInfoList = new List<JobWorkerLog<LawSyncLogInfo>>();
        _documentProcessStateList = new List<DocumentConversionLogBeo>();
        _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);

        if (syncDocuments.Documents.Any())
        {
            foreach (var syncDocument in syncDocuments.Documents)
            {
                SetMetadataForDcoument(syncDocument);
            }
        }

        if (_documentProcessStateList.Any())
        {
            UpdateDcoumentProcessState(_documentProcessStateList);
        }

        Send(syncDocuments);

        if (_logInfoList.Any())
        {
            SendLogPipe(_logInfoList);
        }
    }
    catch (Exception ex)
    {
        ReportToDirector(ex);
        ex.Trace().Swallow();
        LogMessage(Constants.LawSyncFailureinGetMetadataMessage + ex.ToUserString());
    }
}
/// <summary>
/// Initializes the categorize worker: deserializes the job parameters,
/// resolves the dataset and the analytics project, then flags the
/// commit-index status as initialized.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (CategorizeInfo)XmlUtility.DeserializeObject(BootParameters, typeof(CategorizeInfo));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);
    _projectInfo = AnalyticsProject.Get(
        _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture),
        _jobParameter.DatasetId.ToString(CultureInfo.InvariantCulture),
        _jobParameter.ProjectId.ToString(CultureInfo.InvariantCulture));
    SetCommiyIndexStatusToInitialized(_jobParameter.MatterId);
}
/// <summary>
/// Begins the work.
/// Resolves the original analytics job parameters from the reprocess job's
/// stored configuration and loads the dataset.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _reprocessJobParameter = (ConversionReprocessJobBeo)XmlUtility.DeserializeObject(BootParameters, typeof(ConversionReprocessJobBeo));
    var originalConfig = ReconversionDAO.GetJobConfigInfo(Convert.ToInt32(_reprocessJobParameter.OrginialJobId));
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(originalConfig.BootParameters, typeof(AnalyticsProjectInfo));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);
}
/// <summary>
/// Begins the work.
/// Loads analytics job parameters and dataset, counts project documents, and
/// resolves the batch size and project field id.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    var datasetId = Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
    _analyticProject = new AnalyticsProject();
    var matterId = Convert.ToInt64(_jobParameter.MatterId, CultureInfo.CurrentCulture);
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCount(matterId, _jobParameter.ProjectCollectionId);
    _batchSize = Convert.ToInt32(ApplicationConfigurationManager.GetValue("UpdateFieldsBatchSize", "AnalyticsProject"));
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
}
/// <summary>
/// Begins the work: deserializes the analytics job parameters, loads the
/// dataset, counts the project documents, and reads the update batch size.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture));

    _analyticProject = new AnalyticsProject();
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCount(
        Convert.ToInt64(_jobParameter.MatterId, CultureInfo.CurrentCulture),
        _jobParameter.ProjectCollectionId);

    _batchSize = Convert.ToInt32(ApplicationConfigurationManager.GetValue("UpdateFieldsBatchSize", "AnalyticsProject"));
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
}
/// <summary>
/// Processes the work item: converts LAW documents into document details,
/// reports missing-content logs, and dispatches the batch for append or
/// overlay handling.
/// </summary>
/// <param name="message">The message.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    var documentCollection = (LawDocumentCollection)message.Body;
    documentCollection.ShouldNotBe(null);
    documentCollection.Documents.ShouldNotBe(null);
    _dataset = documentCollection.Dataset;
    try
    {
        var docDetails = new List<DocumentDetail>();
        var importLogs = new List<JobWorkerLog<LawImportLogInfo>>();
        var documentManager = new LawDocumentManager(_jobParams, PipelineId, WorkerId, _dataset);
        foreach (var lawDoc in documentCollection.Documents)
        {
            JobWorkerLog<LawImportLogInfo> importLog;
            var details = documentManager.GetDocuments(
                lawDoc.LawDocumentId.ToString(CultureInfo.InvariantCulture),
                lawDoc.DocumentControlNumber, lawDoc, out importLog);
            if (details != null)
            {
                docDetails.AddRange(details);
            }
            if (importLog != null)
            {
                importLogs.Add(importLog);
            }
        }

        // Log messages for missing native, missing images and missing text.
        if (importLogs.Any())
        {
            SendLog(importLogs);
        }

        if (_jobParams.ImportOptions == ImportOptionsBEO.AppendNew)
        {
            Send(docDetails);
            SendThreads(docDetails);
            SendFamilies(docDetails);
            return;
        }

        // Overlay scenario.
        if (docDetails.Any())
        {
            ProcessDocuments(docDetails);
            SendThreads(docDetails);
            SendFamilies(docDetails);
        }
    }
    catch (Exception ex)
    {
        ReportToDirector(ex.ToUserString());
        ex.Trace().Swallow();
        LogErrorMessage(documentCollection.Documents, false, ProcessFailedMessage);
    }
}
/// <summary>
/// Creates a load-file document manager, caching the delimiters, source
/// location and job context used while parsing records.
/// </summary>
public LoadFileDocumentManager(ImportBEO jobParameter, string threadingString, string datasetPath, DatasetBEO dataset, string jobRunId, string workerInstanceId, int jobId)
{
    m_JobParameter = jobParameter;

    // Delimiters come from the load-file definition on the job parameter.
    var loadFile = m_JobParameter.LoadFile;
    m_ColumnDelimiter = Convert.ToChar(loadFile.ColumnDelimiter);
    m_QuoteCharacter = Convert.ToChar(loadFile.QuoteCharacter);
    m_FieldRowDelimiter = Convert.ToChar(loadFile.NewlineDelimiter);

    m_SourceFile = new Uri(m_JobParameter.Locations.First());
    m_ThreadingConstraint = threadingString;
    m_DatasetPath = datasetPath;
    m_Dataset = dataset;
    m_ConcordanceFieldSplitter = Constants.ConcordanceFieldSplitter;
    m_JobRunId = jobRunId;
    m_WorkerInstanceId = workerInstanceId;
    m_ImportDescription = jobParameter.ImportDetail;
    _JobId = jobId;
}
/// <summary>
/// Begins the work: loads analytics job parameters and dataset, resolves the
/// project field id and batch size, and (when adding additional documents)
/// resolves the include-job ids once.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture));
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
    _documentBachSize = Convert.ToInt32(ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;

    // Include-job ids are resolved once, and only when additional documents are added.
    if (_jobParameter.IsAddAdditionalDocuments && string.IsNullOrEmpty(_jobIds))
    {
        _jobIds = GetIncludeJobIds();
    }
}
/// <summary>
/// Worker begin work event: loads job parameters, resolves dataset, matter
/// and search-server details, verifies LAW connectivity, creates mapped
/// fields/tags, and determines the import batch size. Failures are logged,
/// reported, and rethrown.
/// </summary>
protected override void BeginWork()
{
    try
    {
        base.BeginWork();
        _jobParams = GetJobParams(BootParameters);
        _jobParams.ShouldNotBe(null);
        _jobParams.FolderId.ShouldBeGreaterThan(0);

        _datasetDetails = DataSetBO.GetDataSetDetailForDataSetId(_jobParams.FolderId);
        var matter = MatterDAO.GetMatterDetails(_jobParams.MatterId.ToString(CultureInfo.InvariantCulture));
        matter.ShouldNotBe(null);
        _datasetDetails.Matter = matter;
        var searchServer = ServerDAO.GetSearchServer(matter.SearchServer.Id);
        searchServer.ShouldNotBe(null);
        _datasetDetails.Matter.SearchServer = searchServer;

        if (!LawBO.TestServerConnection(_jobParams.LawCaseId))
        {
            ReportToDirector("Failed to connect Law server. Please see application log for details.");
        }

        if (EVHttpContext.CurrentContext == null)
        {
            // Moq the session
            MockSession(_jobParams.CreatedBy);
        }

        // Create fields for selected law fields.
        CreateSelectedLawFields();
        // Create tags for selected law tags.
        CreateSelectedLawTags();
        // Law import batch size for documents.
        _batchSize = GetMessageBatchSize();
    }
    catch (Exception ex)
    {
        // Send log info to the log worker before failing the worker.
        LogMessage(false, ex.ToUserString());
        ReportToDirector(ex.ToUserString());
        throw;
    }
}
/// <summary>
/// Initializes the bulk-print worker: deserializes the print request from the
/// boot parameters, resolves the mapped printer and source paths, and loads
/// the dataset for the requested collection.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    // Deserialize the boot parameter into a BulkPrintServiceRequestBEO.
    // The StringReader is now disposed deterministically — the original
    // implementation never disposed it (resource leak).
    var serializer = new XmlSerializer(typeof(BulkPrintServiceRequestBEO));
    using (var stream = new StringReader(BootParameters))
    {
        bulkPrintServiceRequestBEO = (BulkPrintServiceRequestBEO)serializer.Deserialize(stream);
    }

    sharedLocation = bulkPrintServiceRequestBEO.FolderPath;
    mappedPrinter = PrinterManagementBusiness.GetMappedPrinter(
        new MappedPrinterIdentifierBEO(
            bulkPrintServiceRequestBEO.Printer.UniqueIdentifier.Split(Constants.Split).Last(), true));
    sourceLocation = Path.Combine(
        Path.Combine(sharedLocation, bulkPrintServiceRequestBEO.Name), Constants.SourceDirectoryPath);

    // PipelineId doubles as the job run id; default to 0 when absent.
    jobRunId = !string.IsNullOrEmpty(PipelineId) ? Convert.ToInt32(PipelineId) : 0;

    // Get Dataset details for the given collection id.
    _mDataSet = DataSetBO.GetDataSetDetailForCollectionId(bulkPrintServiceRequestBEO.DataSet.CollectionId);
}
/// <summary>
/// LawSync startup: validates job parameters, caches DCN/LAW field details,
/// prepares the image volume when images are produced, and resolves the vault
/// manager. Failures are logged and rethrown to stop the job.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _jobParameter = (LawSyncBEO)XmlUtility.DeserializeObject(BootParameters, typeof(LawSyncBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    #endregion

    try
    {
        // DCN field name and LAW document-id field id come from the dataset.
        _dataset = GetDatasetDetails(_jobParameter.DatasetId, _jobParameter.MatterId);
        var dcnField = _dataset.DatasetFieldList.FirstOrDefault(f => f.FieldType.DataTypeId == Constants.DCNFieldTypeId);
        if (dcnField != null)
        {
            _dcnFieldName = dcnField.Name;
        }
        var lawIdField = _dataset.DatasetFieldList.FirstOrDefault(f => f.Name == EVSystemFields.LawDocumentId);
        if (lawIdField != null)
        {
            _lawFieldId = lawIdField.ID;
        }

        // Create a volume folder for produced images.
        if (_jobParameter.IsProduceImage)
        {
            _jobEVImagesDirectory = LawVolumeHelper.GetJobImageFolder(WorkAssignment.JobId, _jobParameter.LawCaseId);
            _volumeCount++;
            _volumeFolderName = LawVolumeHelper.CreateVolumeFolder(_jobEVImagesDirectory, _volumeCount);
        }

        _vaultManager = EVUnityContainer.Resolve<IDocumentVaultManager>(Constants.DocumentVaultManager);
    }
    catch (Exception ex)
    {
        ex.Trace();
        ReportToDirector(ex);
        ConstructLog(Constants.LawSyncStartupFailureMessage);
        throw;
    }
}
/// <summary>
/// Begins the work: loads job parameters, records the initial document count
/// in the job log, resolves or creates the analytics index id, and creates
/// the analytical index.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectInSubSystemJobBatchSize", "AnalyticsProject"));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCountByTaskId(
        Convert.ToInt64(_jobParameter.MatterId, CultureInfo.CurrentCulture),
        _jobParameter.ProjectCollectionId, _jobParameter.PrimarySystemJobId);

    // Record the initial job state (0 of N processed).
    var initialSummary = new EVKeyValuePairs();
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, initialSummary);

    // Rerun / add-additional-documents jobs reuse the existing index id when one was created.
    if (_jobParameter.IsRerunJob || _jobParameter.IsAddAdditionalDocuments)
    {
        _indexId = AnalyticsProject.GetIndexIdForProject(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, false);
    }
    if (string.IsNullOrEmpty(_indexId))
    {
        _indexId = "idx-" + Guid.NewGuid().ToString().ToLowerInvariant();
        _analyticProject.InsertIndexId(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, _indexId);
    }

    // Create the index in Spark SVM.
    AnalyticsProject.CreateAnalyticalIndex(_jobParameter.MatterId, WorkAssignment.JobId, _indexId);
    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Begins the work: loads analytics job parameters and dataset, records the
/// initial job progress, and configures the document batch size.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture));
    _totalDocumentCount = _analyticProject.GetSelectedDocumentsCount(_dataset.CollectionId, _jobParameter, WorkAssignment.JobId);

    // Record the initial state (0 of N processed) in the job log.
    var summaryPairs = new EVKeyValuePairs();
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, summaryPairs);

    _documentBachSize = Convert.ToInt32(ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Initializes the worker: reads the job definition, resolves the dataset and
/// project field id, and — for add-additional-documents runs — collects the
/// include-job ids once.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();

    _dataset = DataSetBO.GetDataSetDetailForDataSetId(Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture));
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);

    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;

    // Only add-additional-documents runs need the include-job ids, and only
    // when they have not already been resolved.
    if (_jobParameter.IsAddAdditionalDocuments && string.IsNullOrEmpty(_jobIds))
    {
        _jobIds = GetIncludeJobIds();
    }
}
/// <summary>
/// End of the work process.
/// Reads the per-document reconversion status counters for this job, writes the
/// totals back into the job's boot parameter, and deletes the reconversion
/// input file that listed the documents to convert.
/// </summary>
protected override void EndWork()
{
    var reprocessParams = GetBootObject<ConversionReprocessJobBeo>(BootParameters);
    int reprocessJobId = WorkAssignment.JobId;

    // Resolve the matter so the correct vault repository can be opened.
    DatasetBEO datasetDetails = DataSetBO.GetDataSetDetailForDataSetId(reprocessParams.DatasetId);
    long folderId = datasetDetails.Matter.FolderID;
    var repository = VaultRepository.CreateRepository(folderId);

    int total;
    int succeeded;
    int failed;
    repository.GetReconversionDocStatusCount(reprocessJobId, out total, out succeeded, out failed);

    reprocessParams.TotalDocCount = total;
    reprocessParams.FailedDocCount = failed;

    // Re-serialize the updated boot parameter back to XML.
    string updatedBootParam;
    var xmlSerializer = new XmlSerializer(typeof(ConversionReprocessJobBeo));
    using (var stringWriter = new StringWriter())
    {
        xmlSerializer.Serialize(stringWriter, reprocessParams);
        updatedBootParam = stringWriter.ToString();
    }

    // Persist the new boot parameters for the job.
    ReconversionDAO.UpdateReconversionBootParamter(reprocessJobId, updatedBootParam);

    // Clean up the reconversion input file that contains the list of documents to convert.
    SafeDeleteFile(reprocessParams.FilePath);
}
/// <summary>
/// Prepares the worker: reads the job definition from the boot parameters,
/// looks up the dataset and project document total, seeds the job log, and
/// makes sure the project's analytical index exists (reusing a previously
/// created one when re-running or adding documents).
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof (AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();

    // Sub-system batch size for including documents into the project.
    var batchSizeSetting =
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectInSubSystemJobBatchSize", "AnalyticsProject");
    _documentBachSize = Convert.ToInt32(batchSizeSetting);

    var datasetId = Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;

    var matterId = Convert.ToInt64(_jobParameter.MatterId, CultureInfo.CurrentCulture);
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCountByTaskId(matterId,
        _jobParameter.ProjectCollectionId, _jobParameter.PrimarySystemJobId);

    // Seed the job log with the initial state (nothing processed yet).
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, new EVKeyValuePairs());

    // Rerun / add-additional-documents: fetch the index id created by the earlier run, if any.
    if (_jobParameter.IsRerunJob || _jobParameter.IsAddAdditionalDocuments)
    {
        _indexId = AnalyticsProject.GetIndexIdForProject(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, false);
    }

    // Still no index id: generate a fresh one and persist it for the project.
    if (string.IsNullOrEmpty(_indexId))
    {
        _indexId = "idx-" + Guid.NewGuid().ToString().ToLowerInvariant();
        _analyticProject.InsertIndexId(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, _indexId);
    }

    // Create Index in Spark SVM.
    AnalyticsProject.CreateAnalyticalIndex(_jobParameter.MatterId, WorkAssignment.JobId, _indexId);
    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Initializes the near-duplication worker: deserializes and validates the job
/// definition, resolves the dataset, and creates the index-manager proxy.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _jobParameter = (NearDuplicationJobBEO)XmlUtility.DeserializeObject(BootParameters, typeof(NearDuplicationJobBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    _jobParameter.CollectionId.ShouldNotBeEmpty();
    #endregion

    var matterIdText = _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture);
    _dataset = GetDatasetDetails(_jobParameter.DatasetId, matterIdText);
    _dataset.ShouldNotBe(null);

    _indexManagerProxy = new IndexManagerProxy(_dataset.Matter.FolderID, _dataset.CollectionId);
    SetNearDuplicationFieldId();
}
/// <summary>
/// Worker start-up: reads and validates the near-duplication job parameters,
/// then prepares the dataset details and the index-manager proxy used by the
/// processing stages.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    _jobParameter =
        (NearDuplicationJobBEO)XmlUtility.DeserializeObject(BootParameters, typeof(NearDuplicationJobBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    _jobParameter.CollectionId.ShouldNotBeEmpty();
    #endregion

    _dataset = GetDatasetDetails(_jobParameter.DatasetId,
        _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture));
    _dataset.ShouldNotBe(null);
    _indexManagerProxy = new IndexManagerProxy(_dataset.Matter.FolderID, _dataset.CollectionId);

    SetNearDuplicationFieldId();
}
/// <summary>
/// Initializes the LAW sync worker: deserializes the job definition, resolves
/// the DCN and LAW-document-id fields from the dataset, creates the first image
/// volume folder when images are produced, and resolves the vault manager.
/// Startup failures are traced, reported, logged, and rethrown.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    _jobParameter = (LawSyncBEO) XmlUtility.DeserializeObject(BootParameters, typeof (LawSyncBEO));

    #region "Assertion"
    _jobParameter.DatasetId.ShouldBeGreaterThan(0);
    _jobParameter.MatterId.ShouldBeGreaterThan(0);
    #endregion

    try
    {
        //Get DCN field name
        _dataset = GetDatasetDetails(_jobParameter.DatasetId, _jobParameter.MatterId);
        var field = _dataset.DatasetFieldList.FirstOrDefault(f => f.FieldType.DataTypeId == Constants.DCNFieldTypeId);
        if (field != null)
            _dcnFieldName = field.Name;

        // Resolve the field that stores the LAW document id, if the dataset defines one.
        var lawField = _dataset.DatasetFieldList.FirstOrDefault(f => f.Name == EVSystemFields.LawDocumentId);
        if (lawField != null)
            _lawFieldId = lawField.ID;

        //Create Volume for Images
        if (_jobParameter.IsProduceImage)
        {
            _jobEVImagesDirectory = LawVolumeHelper.GetJobImageFolder(WorkAssignment.JobId, _jobParameter.LawCaseId);
            // First volume for this job; _volumeCount is incremented before folder creation.
            _volumeCount++;
            _volumeFolderName = LawVolumeHelper.CreateVolumeFolder(_jobEVImagesDirectory, _volumeCount);
        }

        _vaultManager = EVUnityContainer.Resolve<IDocumentVaultManager>(Constants.DocumentVaultManager);
    }
    catch (Exception ex)
    {
        // Trace, notify the director, and record a startup-failure log entry before failing the job.
        ex.Trace();
        ReportToDirector(ex);
        ConstructLog(Constants.LawSyncStartupFailureMessage);
        throw;
    }
}
/// <summary>
/// This method converts the UpdateReviewSetJobBEO to ReviewsetRecord
/// </summary>
/// <param name="reviewSetJobBEO">UpdateReviewSetJobBEO to copy values from</param>
/// <param name="dataset">Dataset supplying the matter id and collection id</param>
/// <returns>ReviewsetRecord</returns>
private ReviewsetRecord ConvertToReviewSetRecord(UpdateReviewSetJobBEO reviewSetJobBEO, DatasetBEO dataset)
{
    // Field-for-field copy. Activity is fixed to "Split"; binder details come
    // from the binder entity resolved during worker startup (_binderEntity).
    var rSetRecord = new ReviewsetRecord
    {
        Activity = "Split",
        DatasetId = reviewSetJobBEO.DatasetId,
        MatterId = dataset.Matter.FolderID,
        BinderFolderId = reviewSetJobBEO.BinderFolderId,
        BinderId = _binderEntity.BinderId,
        BinderName = _binderEntity.BinderName,
        DueDate = reviewSetJobBEO.DueDate,
        KeepDuplicatesTogether = reviewSetJobBEO.KeepDuplicates,
        KeepFamilyTogether = reviewSetJobBEO.KeepFamily,
        NumberOfDocuments = reviewSetJobBEO.NumberOfDocuments,
        NumberOfDocumentsPerSet = reviewSetJobBEO.NumberOfDocumentsPerSet,
        NumberOfReviewedDocs = reviewSetJobBEO.NumberOfReviewedDocs,
        NumberOfReviewSets = reviewSetJobBEO.NumberOfReviewSets,
        ReviewSetDescription = reviewSetJobBEO.ReviewSetDescription,
        ReviewSetGroup = reviewSetJobBEO.ReviewSetGroup,
        ReviewSetId = reviewSetJobBEO.ReviewSetId,
        ReviewSetLogic = reviewSetJobBEO.ReviewSetLogic,
        ReviewSetName = reviewSetJobBEO.ReviewSetName,
        SplittingOption = reviewSetJobBEO.SplittingOption,
        StartDate = reviewSetJobBEO.StartDate,
        StatusId = reviewSetJobBEO.StatusId,
        CreatedBy = reviewSetJobBEO.CreatedByGUID,
        CollectionId = dataset.CollectionId,
        AssignTo = reviewSetJobBEO.AssignTo
    };
    // Tags and user assignments are list-valued; copied separately via AddRange.
    rSetRecord.DsTags.AddRange(reviewSetJobBEO.DsTags);
    rSetRecord.ReviewSetUserList.AddRange(reviewSetJobBEO.ReviewSetUserList);
    return rSetRecord;
}
/// <summary>
/// Worker begin work event: loads the job parameters, resolves dataset /
/// matter / search-server details, verifies LAW server connectivity, mocks the
/// session when no HTTP context exists, creates the selected LAW fields and
/// tags, and reads the import batch size. Failures are logged, reported, and
/// rethrown.
/// </summary>
protected override void BeginWork()
{
    try
    {
        base.BeginWork();

        _jobParams = GetJobParams(BootParameters);
        _jobParams.ShouldNotBe(null);
        _jobParams.FolderId.ShouldBeGreaterThan(0);

        // Resolve dataset, matter and search-server details for later stages.
        _datasetDetails = DataSetBO.GetDataSetDetailForDataSetId(_jobParams.FolderId);
        var matter = MatterDAO.GetMatterDetails(_jobParams.MatterId.ToString(CultureInfo.InvariantCulture));
        matter.ShouldNotBe(null);
        _datasetDetails.Matter = matter;

        var searchServer = ServerDAO.GetSearchServer(matter.SearchServer.Id);
        searchServer.ShouldNotBe(null);
        _datasetDetails.Matter.SearchServer = searchServer;

        if (!LawBO.TestServerConnection(_jobParams.LawCaseId))
        {
            ReportToDirector("Failed to connect Law server. Please see application log for details.");
        }

        if (EVHttpContext.CurrentContext == null)
        {
            // Moq the session
            MockSession(_jobParams.CreatedBy);
        }

        //Create fields for selected law fields
        CreateSelectedLawFields();
        //Create tags for selected law tags
        CreateSelectedLawTags();
        //Law import batch size for documents
        _batchSize = GetMessageBatchSize();
    }
    catch (Exception ex)
    {
        // Send the failure to the log worker and the director, then fail startup.
        LogMessage(false, ex.ToUserString());
        ReportToDirector(ex.ToUserString());
        throw;
    }
}
/// <summary>
/// Constructs and returns the document search query entity
/// </summary>
/// <param name="jobParameters">The job parameters.</param>
/// <param name="datasetEntity">The dataset entity.</param>
/// <param name="startIndex">Index of the first document to fetch.</param>
/// <param name="documentCount">Number of documents to fetch.</param>
/// <param name="outputFields">Fields to include in the search results; may be null.</param>
/// <returns>Query entity combining the selection query with paging, sorting and output fields.</returns>
private DocumentQueryEntity GetQueryEntity(CreateReviewSetJobBEO jobParameters, DatasetBEO datasetEntity,
    int startIndex, int documentCount, List<Field> outputFields)
{
    DocumentQueryEntity documentQueryEntity = new DocumentQueryEntity
    {
        QueryObject = new SearchQueryEntity
        {
            MatterId = datasetEntity.Matter.FolderID,
            IsConceptSearchEnabled = jobParameters.DocumentSelectionContext.SearchContext.IsConceptSearchEnabled,
            DatasetId = datasetEntity.FolderID
        }
    };
    documentQueryEntity.IgnoreDocumentSnippet = true;
    documentQueryEntity.DocumentStartIndex = startIndex;
    documentQueryEntity.DocumentCount = documentCount;
    // Results are ranked by relevance.
    documentQueryEntity.SortFields.Add(new Sort { SortBy = Constants.Relevance });
    if (outputFields != null && outputFields.Any())
    {
        documentQueryEntity.OutputFields.AddRange(outputFields);
    }
    var tmpQuery = string.Empty;
    var selectionQuery = string.Empty;
    if (!string.IsNullOrEmpty(jobParameters.DocumentSelectionContext.SearchContext.Query))
    {
        // Use the caller-supplied search query as the base selection.
        tmpQuery = string.Format("({0} )", jobParameters.DocumentSelectionContext.SearchContext.Query);
    }
    else
    {
        // No query supplied: select documents not already assigned to this binder.
        tmpQuery = string.Format("(NOT ({0}:\"{1}\"))", EVSystemFields.BinderId.ToLowerInvariant(),
            _binderEntity.BinderId);
    }
    switch (jobParameters.DocumentSelectionContext.GenerateDocumentMode)
    {
        case DocumentSelectMode.UseSelectedDocuments:
        {
            // Restrict the base query to the explicitly selected document keys (OR-ed together).
            jobParameters.DocumentSelectionContext.SelectedDocuments.ForEach(
                d => selectionQuery += string.Format("{0}:\"{1}\" OR ", EVSystemFields.DocumentKey, d));
            if (!string.IsNullOrEmpty(selectionQuery))
            {
                // Drop the trailing " OR " separator before combining with the base query.
                selectionQuery = selectionQuery.Substring(0, selectionQuery.LastIndexOf(" OR "));
                tmpQuery = string.Format("({0} AND ({1}))", tmpQuery, selectionQuery);
            }
            break;
        }
        case DocumentSelectMode.QueryAndExclude:
        {
            // Exclude the listed document keys from the base query ("NOT key" AND-ed together).
            jobParameters.DocumentSelectionContext.DocumentsToExclude.ForEach(
                d => selectionQuery += string.Format("(NOT {0}:\"{1}\") AND ", EVSystemFields.DocumentKey, d));
            if (!string.IsNullOrEmpty(selectionQuery))
            {
                // Drop the trailing " AND " separator before combining with the base query.
                selectionQuery = selectionQuery.Substring(0, selectionQuery.LastIndexOf(" AND "));
                tmpQuery = string.Format("({0} AND ({1}))", tmpQuery, selectionQuery);
            }
            break;
        }
    }
    // Replace any existing query with the freshly assembled one.
    documentQueryEntity.QueryObject.QueryList.Clear();
    documentQueryEntity.QueryObject.QueryList.Add(new Query { SearchQuery = tmpQuery });
    return documentQueryEntity;
}
/// <summary>
/// Absorb the boot parameters, deserialize and pass on the messages to the Search Worker.
/// Determines the total qualifying document count (via the database for the
/// "all"/"tag" logic, otherwise via the search engine) and builds the final
/// document query with the output fields needed downstream.
/// </summary>
/// <param name="bootParameter">Serialized boot parameters for the job; must not be empty.</param>
public void DoBeginWork(string bootParameter)
{
    bootParameter.ShouldNotBeEmpty();
    // Deserialize and determine the boot object
    _bootObject = GetBootObject(bootParameter);
    // Assert condition to check for jobscheduled by
    _bootObject.JobScheduleCreatedBy.ShouldNotBeEmpty();
    _bootObject.BinderFolderId.ShouldNotBe(0);
    // Get Dataset Details to know about the Collection id and the Matter ID details
    _datasetEntity = DataSetBO.GetDataSetDetailForDataSetId(_bootObject.datasetId);
    //Assert condition to check for dataset details
    _datasetEntity.ShouldNotBe(null);
    _binderEntity = BinderBO.GetBinderDetails(_bootObject.BinderFolderId.ToString());
    _binderEntity.ShouldNotBe(null);
    _reviewSetRecord = ConvertToReviewSetRecord(_bootObject);
    // Construct the document query entity to determine the total documents
    // (count-only probe: startIndex 0, documentCount 1, no output fields).
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0, 1, null);
    // Mock the user session
    MockSession();
    _docQueryEntity.TransactionName =
        _docQueryEntity.QueryObject.TransactionName = "ReviewsetStartupWorker - DoBeginWork (GetCount)";
    var reviewsetLogic = _reviewSetRecord.ReviewSetLogic.ToLower();
    if (reviewsetLogic == "all" || reviewsetLogic == "tag")
    {
        // "all"/"tag" logic counts via the database; double quotes are stripped
        // from the search query before it is passed down.
        var searchQuery = !string.IsNullOrEmpty(_bootObject.SearchQuery)
            ? _bootObject.SearchQuery.Replace("\"", "")
            : string.Empty;
        _totalDocumentCount = DocumentBO.GetDocumentCountForCreateReviewsetJob(_datasetEntity.Matter.FolderID,
            _datasetEntity.CollectionId, reviewsetLogic, searchQuery);
    }
    else
    {
        // Retrieve the total documents qualified
        _totalDocumentCount = ReviewerSearchInstance.GetDocumentCount(_docQueryEntity.QueryObject);
    }
    Tracer.Info("Reviewset Startup Worker : {0} matching documents determined for the requested query",
        _totalDocumentCount);
    if (_totalDocumentCount < 1)
    {
        // No qualifying documents is a fatal startup condition for the job.
        var message = String.Format("Search server does not return any documents for the reviewset '{0}'",
            _reviewSetRecord.ReviewSetName);
        throw new ApplicationException(message);
    }
    LogMessage(true, string.Format("{0} documents are qualified", _totalDocumentCount));
    // Construct the document query entity to write the resultant documents in xml file;
    // DCN, family id and duplicate id are needed by the downstream grouping stages.
    var outputFields = new List<Field>();
    outputFields.AddRange(new List<Field>()
    {
        new Field { FieldName = EVSystemFields.DcnField},
        new Field { FieldName = EVSystemFields.FamilyId},
        new Field { FieldName = EVSystemFields.DuplicateId}
    });
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0, Convert.ToInt32(_totalDocumentCount),
        outputFields);
}
/// <summary>
/// Begins the work: loads the import profile from the boot parameters,
/// initializes configuration and working state, and resolves the dataset,
/// matter and search-server details needed for compound-file extraction.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();
    try
    {
        m_Parameters = DocumentImportHelper.GetProfileBeo((string)BootParameters);
        m_CounterForCorrelationId = 0;
        InitializeConfigurationItems();
        //?? need to percentage completion
        m_PercenatgeCompletion = 100;
        m_Documents = new List<RVWDocumentBEO>();

        #region Get Dataset Details
        if (m_Parameters != null && m_Parameters.DatasetDetails.FolderID > 0)
        {
            // Compound-file extraction is the only extraction choice used by this worker.
            m_FileProcessor = FileProcessorFactory.CreateFileProcessor(
                FileProcessorFactory.ExtractionChoices.CompoundFileExtraction);
            m_Dataset = DataSetBO.GetDataSetDetailForDataSetId(m_Parameters.DatasetDetails.FolderID);
            if (m_Dataset.Matter != null && m_Dataset.Matter.FolderID > 0)
            {
                // Enrich the dataset with full matter and search-server details when available.
                var matterDetails = MatterDAO.GetMatterDetails(m_Dataset.Matter.FolderID.ToString());
                if (matterDetails != null)
                {
                    m_Dataset.Matter = matterDetails;
                    var searchServerDetails = ServerDAO.GetSearchServer(matterDetails.SearchServer.Id);
                    if (searchServerDetails != null)
                    {
                        m_Dataset.Matter.SearchServer = searchServerDetails;
                    }
                }
            }
            else
                throw new EVException().AddErrorCode(ErrorCodes.EDLoaderExtractionWorker_FailedToObtainMatterDetails);
            //?? need to set message in resource file
        }
        else
        {
            throw new EVException().AddErrorCode(ErrorCodes.EDLoaderExtractionWorker_ObtainDatasetDetailsFailure);
            //?? need to set message in resource file
        }
        #endregion
    }
    catch (Exception ex)
    {
        // NOTE(review): startup exceptions (including the EVExceptions thrown above)
        // are traced and swallowed here, so the worker may continue partially
        // initialized — confirm this best-effort behavior is intended.
        ex.Trace().Swallow();
    }
}
/// <summary>
/// Generates No.of Reviewsets to be created tasks
/// </summary>
/// <param name="jobParameters">Update Reviewset job BEO; null is logged as an error and yields no tasks</param>
/// <param name="lastCommitedTaskCount">Always set to 0; no tasks are pre-committed</param>
/// <returns>List of Update ReviewsetJob Tasks (BEOs)</returns>
protected override Tasks<UpdateReviewSetTaskBEO> GenerateTasks(UpdateReviewSetJobBEO jobParameters,
    out int lastCommitedTaskCount)
{
    Tasks<UpdateReviewSetTaskBEO> tasks = new Tasks<UpdateReviewSetTaskBEO>();
    lastCommitedTaskCount = 0;
    try
    {
        if (jobParameters != null)
        {
            /* Get Dataset Details for dataset id to get know about the Collection id and the Matter ID*/
            DatasetBEO datasetEntity =
                DataSetService.GetDataSet(jobParameters.datasetId.ToString(CultureInfo.InvariantCulture));
            string sMatterId = datasetEntity.Matter.FolderID.ToString(CultureInfo.InvariantCulture);
            var _reviewSetEntity = ReviewSetBO.GetReviewSetDetails(sMatterId, jobParameters.ReviewSetId);
            List<FilteredDocumentBusinessEntity> qualifiedDocuments = null;
            // Qualify the documents based on the activity; Remove and Archive
            // both use the Remove qualification path.
            if (jobParameters.Activity.Equals(Constants.Add))
            {
                qualifiedDocuments = GetQualifiedDocuments(jobParameters,
                    jobParameters.datasetId.ToString(CultureInfo.InvariantCulture), sMatterId,
                    _reviewSetEntity.BinderId, Constants.Add);
            }
            else if (jobParameters.Activity.Equals(Constants.Remove) ||
                     jobParameters.Activity.Equals(Constants.Archive))
            {
                qualifiedDocuments = GetQualifiedDocuments(jobParameters,
                    jobParameters.datasetId.ToString(CultureInfo.InvariantCulture), sMatterId,
                    _reviewSetEntity.BinderId, Constants.Remove);
            }
            // Replace the job's document list with the freshly qualified set.
            jobParameters.Documents.Clear();
            if (qualifiedDocuments != null && qualifiedDocuments.Count > 0)
            {
                List<RVWDocumentBEO> iterationDocuments = qualifiedDocuments.
                    Select(d => new RVWDocumentBEO
                    {
                        DocumentId = d.Id,
                        MatterId = Convert.ToInt64(d.MatterId),
                        CollectionId = d.CollectionId,
                        FamilyId = d.FamilyId,
                        DocumentControlNumber = d.DCN,
                        DuplicateId = d.DuplicateId
                    }).ToList();
                jobParameters.Documents.AddRange(iterationDocuments);
            }
            // A single task carries the whole update; task numbers are assigned 1-based.
            UpdateReviewSetTaskBEO updateReviewSetTaskBeo = ConvertToTaskBeo(jobParameters, _reviewSetEntity);
            tasks.Add(updateReviewSetTaskBeo);
            for (int i = 1; i <= tasks.Count; i++)
            {
                tasks[i - 1].TaskNumber = i;
            }
        }
        else
        {
            // Missing job parameters: log the error and flag the job log.
            lastCommitedTaskCount = 0;
            EvLog.WriteEntry(Constants.JobLogName + Constants.GenerateTasks, Constants.JobParamND,
                EventLogEntryType.Error);
            JobLogInfo.AddParameters(Constants.JobParamND);
            JobLogInfo.IsError = true;
        }
    }
    catch (Exception ex)
    {
        // Task-generation failures are logged; an empty/partial task list is returned.
        LogException(JobLogInfo, ex, LogCategory.Job, ErrorCodes.ProblemInGenerateTasks, string.Empty);
    }
    return (tasks);
}
/// <summary>
/// This method processes the pipe message: resolves the documents that belong
/// to the review set (from the database for "all"/"tag" logic, otherwise from
/// the search results) and forwards them downstream in batches.
/// </summary>
/// <param name="envelope">Pipe message whose body is a ReviewsetSearchRecord</param>
protected override void ProcessMessage(PipeMessageEnvelope envelope)
{
    var searchRecord = (ReviewsetSearchRecord)envelope.Body;
    // assert checks
    searchRecord.ShouldNotBe(null);
    searchRecord.QueryEntity.ShouldNotBe(null);
    searchRecord.ReviewsetDetails.ShouldNotBe(null);
    try
    {
        // Initialize config values
        GetConfigurationValues();
        searchRecord.ReviewsetDetails.CreatedBy.ShouldNotBeEmpty();
        _createdBy = searchRecord.ReviewsetDetails.CreatedBy;
        DocumentRecordCollection reviewsetDetail;
        // Convert the ReviewsetSearchRecord to DocumentRecordCollection type
        ConvertReviewsetSearchRecordToDocumentRecordCollection(searchRecord, out reviewsetDetail);
        _dataset = DataSetBO.GetDataSetDetailForDataSetId(searchRecord.ReviewsetDetails.DatasetId);
        var documents = new List<DocumentIdentityRecord>();
        var reviewsetLogic = searchRecord.ReviewsetDetails.ReviewSetLogic.ToLower();
        if (reviewsetLogic == "all" || reviewsetLogic == "tag")
        {
            // "all"/"tag" logic pulls documents from the database; double quotes
            // are stripped from the search query before it is passed down.
            var searchQuery = !string.IsNullOrEmpty(_jobParameter.SearchQuery)
                ? _jobParameter.SearchQuery.Replace("\"", "")
                : string.Empty;
            Tracer.Info("Get documents from database to create reviewset is started for All/Tag options - job run id : {0}",
                PipelineId);
            var resultDocuments = DocumentBO.GetDocumentsForCreateReviewsetJob(
                searchRecord.QueryEntity.QueryObject.MatterId, _dataset.CollectionId,
                searchRecord.TotalDocumentCount, reviewsetLogic, searchQuery.ToLower(), _batchSize);
            documents.AddRange(resultDocuments.Select(resultDocument => new DocumentIdentityRecord
            {
                Id = resultDocument.Id,
                DocumentId = resultDocument.DocumentID,
                FamilyId = resultDocument.FamilyID,
                DuplicateId = resultDocument.DuplicateId
            }));
            Tracer.Info("Documents retrieved from database to create review set for All/Tag options - job run id : {0}",
                PipelineId);
        }
        else
        {
            // Other logics resolve the documents through the search pipeline.
            documents = GetDocuments(searchRecord);
        }
        if (documents == null || !documents.Any())
        {
            // No documents is a terminal condition for this message: log and stop.
            Tracer.Error("No documents found for the job run id : {0}", PipelineId);
            LogMessage(false, string.Format("No documents found for the job run id : {0}", PipelineId),
                _createdBy, searchRecord.ReviewsetDetails.ReviewSetName);
            return;
        }
        Tracer.Info("Total of {0} documents found for the job run id : {1}", documents.Count.ToString(), PipelineId);
        LogMessage(true, string.Format("Total of {0} documents found for the job run id : {1}", documents.Count,
            PipelineId), _createdBy, searchRecord.ReviewsetDetails.ReviewSetName);
        // Group the results and send it in batches
        GroupDocumentsAndSend(documents, reviewsetDetail);
    }
    catch (Exception ex)
    {
        // Report, trace and swallow: a failed message is logged against the
        // review set but does not bring down the worker.
        ReportToDirector(ex);
        ex.Trace().Swallow();
        LogMessage(false, ex.ToUserString(), searchRecord.ReviewsetDetails.CreatedBy,
            searchRecord.ReviewsetDetails.ReviewSetName);
    }
}
/// <summary>
/// Constructs and returns the document search query entity
/// </summary>
/// <param name="jobParameters">The job parameters.</param>
/// <param name="datasetEntity">The dataset entity.</param>
/// <param name="startIndex">Index of the first document to fetch.</param>
/// <param name="documentCount">Number of documents to fetch.</param>
/// <param name="outputFields">Fields to include in the search results; may be null.</param>
/// <returns>Query entity combining the selection query with paging, sorting and output fields.</returns>
private DocumentQueryEntity GetQueryEntity(UpdateReviewSetJobBEO jobParameters, DatasetBEO datasetEntity,
    int startIndex, int documentCount, List<Field> outputFields)
{
    var documentQueryEntity = new DocumentQueryEntity
    {
        QueryObject = new SearchQueryEntity
        {
            MatterId = datasetEntity.Matter.FolderID,
            IsConceptSearchEnabled = jobParameters.DocumentSelectionContext.SearchContext.IsConceptSearchEnabled,
            DatasetId = datasetEntity.FolderID,
            ReviewsetId = jobParameters.ReviewSetId
        }
    };
    documentQueryEntity.DocumentStartIndex = startIndex;
    documentQueryEntity.DocumentCount = documentCount;
    // Results are ranked by relevance.
    documentQueryEntity.SortFields.Add(new Sort {SortBy = Constants.Relevance});
    // Include families & duplicates is obsolete...So no need to include families
    // & duplicates as part of search-engine search.. Always set to false;
    // NOTE(review): the code below sets IncludeDuplicates/IncludeFamilies to
    // true, which contradicts the note above — confirm the intended values.
    documentQueryEntity.IgnoreDocumentSnippet = true;
    documentQueryEntity.IncludeDuplicates = true;
    documentQueryEntity.IncludeFamilies = true;
    documentQueryEntity.TotalRecallConfigEntity.IsTotalRecall = true;
    if (outputFields != null && outputFields.Any())
    {
        documentQueryEntity.OutputFields.AddRange(outputFields);
    }
    var tmpQuery = string.Empty;
    var selectionQuery = string.Empty;
    if (!string.IsNullOrEmpty(jobParameters.DocumentSelectionContext.SearchContext.Query))
    {
        // Base selection is the caller-supplied search query (may stay empty).
        tmpQuery = jobParameters.DocumentSelectionContext.SearchContext.Query;
    }
    switch (jobParameters.DocumentSelectionContext.GenerateDocumentMode)
    {
        case DocumentSelectMode.UseSelectedDocuments:
        {
            // Restrict the base query to the explicitly selected document keys (OR-ed together).
            jobParameters.DocumentSelectionContext.SelectedDocuments.ForEach(
                d => selectionQuery += string.Format("{0}:\"{1}\" OR ", EVSystemFields.DocumentKey, d));
            if (!string.IsNullOrEmpty(selectionQuery))
            {
                // Drop the trailing " OR " separator before combining.
                selectionQuery = selectionQuery.Substring(0, selectionQuery.LastIndexOf(" OR "));
                tmpQuery = string.Format("({0} AND {1})", tmpQuery, selectionQuery);
            }
            break;
        }
        case DocumentSelectMode.QueryAndExclude:
        {
            // Exclude the listed document keys from the base query ("NOT key" AND-ed together).
            jobParameters.DocumentSelectionContext.DocumentsToExclude.ForEach(
                d => selectionQuery += string.Format("(NOT {0}:\"{1}\") AND ", EVSystemFields.DocumentKey, d));
            if (!string.IsNullOrEmpty(selectionQuery))
            {
                // Drop the trailing " AND " separator before combining.
                selectionQuery = selectionQuery.Substring(0, selectionQuery.LastIndexOf(" AND "));
                tmpQuery = string.Format("({0} AND {1})", tmpQuery, selectionQuery);
            }
            break;
        }
    }
    // Replace any existing query with the freshly assembled one.
    documentQueryEntity.QueryObject.QueryList.Clear();
    documentQueryEntity.QueryObject.QueryList.Add(new Query {SearchQuery = tmpQuery});
    return documentQueryEntity;
}
/// <summary>
/// Absorb the boot parameters, deserialize and pass on the messages to the Search Worker.
/// Audits the existing review set, determines the total qualifying document
/// count via the search engine, and builds the final document query with the
/// output fields needed by the split stages.
/// </summary>
/// <param name="bootParameter">Serialized boot parameters for the job; must not be empty.</param>
public void DoBeginWork(string bootParameter)
{
    bootParameter.ShouldNotBeEmpty();
    // Deserialize and determine the boot object
    _bootObject = GetBootObject(bootParameter);
    // Assert condition to check for jobscheduled by
    _bootObject.CreatedByGUID.ShouldNotBeEmpty();
    // Get Dataset Details to know about the Collection id and the Matter ID details
    _datasetEntity = DataSetBO.GetDataSetDetailForDataSetId(_bootObject.datasetId);
    _bootObject.BinderFolderId.ShouldNotBe(0);
    _binderEntity = BinderBO.GetBinderDetails(_bootObject.BinderFolderId.ToString());
    _binderEntity.ShouldNotBe(null);
    //Assert condition to check for dataset details
    _datasetEntity.ShouldNotBe(null);
    _datasetEntity.Matter.ShouldNotBe(null);
    _reviewSetRecord = ConvertToReviewSetRecord(_bootObject, _datasetEntity);
    // Construct the document query entity to determine the total documents
    // (count-only probe: startIndex 0, documentCount 1, no output fields).
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0, 1, null);
    _docQueryEntity.TransactionName =
        _docQueryEntity.QueryObject.TransactionName = "ReviewsetStartupWorker - DoBeginWork (GetCount)";
    // Mock the user session
    MockSession();
    var reviewSetDetails = ReviewSetBO.GetReviewSetDetails(_datasetEntity.Matter.FolderID.ToString(),
        _bootObject.ReviewSetId);
    if (reviewSetDetails != null)
    {
        reviewSetDetails.Action = _reviewSetRecord.Activity;
        reviewSetDetails.BinderName = _binderEntity.BinderName;
        //Audit Logging for existing review set
        ReviewSetBO.UpdateReviewSet(reviewSetDetails, false, false);
    }
    // Retrieve the total documents qualified
    _totalDocumentCount = ReviewerSearchInstance.GetDocumentCount(_docQueryEntity.QueryObject);
    Tracer.Info("Split Reviewset Startup Worker : {0} matching documents determined for the requested query",
        _totalDocumentCount);
    if (_totalDocumentCount < 1)
    {
        // No qualifying documents is a fatal startup condition for the split job.
        var message = String.Format("Search engine does not return any documents for Reviewset {0}",
            _reviewSetRecord.ReviewSetName);
        throw new ApplicationException(message);
    }
    // Construct the document query entity to write the resultant documents in xml file;
    // these fields are consumed by the downstream split/grouping stages.
    var outputFields = new List<Field>();
    outputFields.AddRange(new List<Field>
    {
        new Field {FieldName = EVSystemFields.FamilyId},
        new Field {FieldName = EVSystemFields.DocumentKey},
        new Field {FieldName = EVSystemFields.ReviewSetId},
        new Field {FieldName = EVSystemFields.DuplicateId},
        new Field {FieldName = EVSystemFields.Tag.ToLower()},
        new Field {FieldName = _datasetEntity.DocumentControlNumberName}
    });
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0, Convert.ToInt32(_totalDocumentCount),
        outputFields);
}
/// <summary>
/// To set field id for created fields: resolves each LAW field's mapped
/// dataset field id by name and caches the resulting list in _selectedFields.
/// </summary>
/// <param name="fields">LAW fields whose MappingFieldId should be filled in</param>
private void SetFieldIdForCreatedFields(List<LawFieldBEO> fields)
{
    _datasetDetails = DataSetBO.GetDataSetDetailForDataSetId(_jobParams.FolderId);
    foreach (var lawField in fields)
    {
        // Match on the mapped field name; fields without a match keep their current id.
        var mappedField = _datasetDetails.DatasetFieldList.Find(f => f.Name.Equals(lawField.MappingFieldName));
        if (mappedField != null)
        {
            lawField.MappingFieldId = mappedField.ID.ToString(CultureInfo.InvariantCulture);
        }
    }
    _selectedFields = fields;
}