/// <summary>
/// Prepares the worker: deserializes the reprocess boot parameters, loads the
/// original job's configuration, and resolves the target dataset.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    // The boot parameters describe which conversion job is being reprocessed.
    _reprocessJobParameter =
        (ConversionReprocessJobBeo)XmlUtility.DeserializeObject(BootParameters, typeof(ConversionReprocessJobBeo));

    // Rehydrate the original job's own boot parameters to recover the project info.
    var originalJobConfig =
        ReconversionDAO.GetJobConfigInfo(Convert.ToInt32(_reprocessJobParameter.OrginialJobId));
    _jobParameter =
        (AnalyticsProjectInfo)XmlUtility.DeserializeObject(originalJobConfig.BootParameters, typeof(AnalyticsProjectInfo));

    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);
}
/// <summary>
/// Creates dataset fields for LAW fields selected for import that do not yet
/// exist in the target dataset, then writes the generated field ids back onto
/// the mapping collection.
/// </summary>
private void CreateSelectedLawFields()
{
    if (_jobParams == null)
    {
        _jobParams = GetJobParams(BootParameters);
    }
    _selectedFields = _jobParams.MappingFields;

    var dataSetNewFields = new List<FieldBEO>();
    var dataTypes = DataSetTemplateBO.GetDataTypeAndDataFormatList();
    var dataSetExistingFields = DataSetBO.GetDataSetFields(_jobParams.DatasetId, _jobParams.CollectionId);
    dataSetExistingFields = dataSetExistingFields ?? new List<FieldBEO>();

    // Only mappings without a field id need a dataset field created for them.
    foreach (var mappedField in _jobParams.MappingFields.FindAll(x => string.IsNullOrEmpty(x.MappingFieldId)))
    {
        if (string.IsNullOrEmpty(mappedField.MappingFieldName))
        {
            continue;
        }

        // FIX: compare names with an ordinal case-insensitive comparison.
        // The original used ToLower()+Equals, which is culture-sensitive
        // (e.g. Turkish 'I') and allocates temporary strings.
        var dataSetField = dataSetExistingFields.FirstOrDefault(
            dsField => !string.IsNullOrEmpty(dsField.Name) &&
                       string.Equals(dsField.Name, mappedField.MappingFieldName,
                           StringComparison.OrdinalIgnoreCase));
        if (dataSetField != null)
        {
            // Mapped field already exists in the dataset, so don't create it.
            continue;
        }

        var field = new FieldBEO
        {
            Name = mappedField.MappingFieldName,
            IsReadable = true,
            IsSingleEntry = true,
            IsHiddenField = false,
            IsValidateDateValues = true,
            CharacterLength = 10,
            ModifiedBy = _jobParams.CreatedBy
        };
        SetFieldDataType(field, mappedField, dataTypes);
        dataSetNewFields.Add(field);
    }

    if (!dataSetNewFields.Any())
    {
        return;
    }

    DataSetBO.AddBulkFields(_jobParams.FolderId, dataSetNewFields, _jobParams.CreatedBy);
    // Newly created fields now have ids; propagate them back to the mappings.
    SetFieldIdForCreatedFields(_jobParams.MappingFields);
}
/// <summary>
/// Initializes worker state: job parameters, dataset details, total project
/// document count, update batch size, and the project field id.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));

    var datasetId = Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);

    var matterId = Convert.ToInt64(_jobParameter.MatterId, CultureInfo.CurrentCulture);
    _analyticProject = new AnalyticsProject();
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCount(matterId, _jobParameter.ProjectCollectionId);

    _batchSize = Convert.ToInt32(ApplicationConfigurationManager.GetValue("UpdateFieldsBatchSize", "AnalyticsProject"));
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
}
/// <summary>
/// Removes the review set association in Vault for the given documents.
/// </summary>
/// <param name="documents">Documents whose review set assignment is cleared.</param>
/// <param name="datasetId">Identifier of the dataset the documents belong to.</param>
private void UnAssignReviewsetInVault(List<ReviewsetDocumentBEO> documents, long datasetId)
{
    // Suppress any ambient transaction while updating the database.
    using (var suppressedScope = new EVTransactionScope(TransactionScopeOption.Suppress))
    {
        var datasetDetails = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
        datasetDetails.ShouldNotBe(null);

        // Drop the review set association for the supplied documents.
        ReviewSetBO.DeleteDocumentsFromReviewSetForOverdrive(
            datasetDetails.Matter.FolderID.ToString(),
            datasetDetails.CollectionId,
            documents);
    }
}
/// <summary>
/// Looks up the DCN (document control number) field value for a document.
/// </summary>
/// <param name="matterId">Matter identifier.</param>
/// <param name="collectionId">Collection identifier (GUID string).</param>
/// <param name="documentId">Document identifier.</param>
/// <returns>The DCN value, or an empty string when no DCN field or value exists.</returns>
string GetDcnFieldValue(string matterId, string collectionId, string documentId)
{
    matterId.ShouldNotBeEmpty();
    collectionId.ShouldNotBeEmpty();
    documentId.ShouldNotBeEmpty();

    var fields = DataSetBO.GetDatasetFieldsOfType(matterId, new Guid(collectionId), Constants.DCNFieldTypeId);
    var dcnFieldValue = string.Empty;
    if (fields.Count > 0)
    {
        var dcnField = DocumentBO.GetDocumentFieldById(matterId, collectionId, documentId, fields[0].ID);
        // FIX: guard against a null lookup result; the original dereferenced
        // dcnField.FieldValue unconditionally and could throw NullReferenceException.
        if (dcnField != null)
        {
            dcnFieldValue = dcnField.FieldValue;
        }
    }
    return string.IsNullOrEmpty(dcnFieldValue) ? string.Empty : dcnFieldValue;
}
/// <summary>
/// Worker startup: loads job parameters, resolves dataset/matter/search-server
/// details, verifies the LAW server connection, and prepares fields, tags and
/// the import batch size. Any failure is logged, reported, and rethrown.
/// </summary>
protected override void BeginWork()
{
    try
    {
        base.BeginWork();

        _jobParams = GetJobParams(BootParameters);
        _jobParams.ShouldNotBe(null);
        _jobParams.FolderId.ShouldBeGreaterThan(0);

        // Resolve the dataset and attach matter + search-server details to it.
        _datasetDetails = DataSetBO.GetDataSetDetailForDataSetId(_jobParams.FolderId);
        var matter = MatterDAO.GetMatterDetails(_jobParams.MatterId.ToString(CultureInfo.InvariantCulture));
        matter.ShouldNotBe(null);
        _datasetDetails.Matter = matter;

        var searchServer = ServerDAO.GetSearchServer(matter.SearchServer.Id);
        searchServer.ShouldNotBe(null);
        _datasetDetails.Matter.SearchServer = searchServer;

        // A failed connection is reported but does not abort startup here.
        if (!LawBO.TestServerConnection(_jobParams.LawCaseId))
        {
            ReportToDirector("Failed to connect Law server. Please see application log for details.");
        }

        // Outside a web request there is no session; mock one for the job's creator.
        if (EVHttpContext.CurrentContext == null)
        {
            MockSession(_jobParams.CreatedBy);
        }

        CreateSelectedLawFields();   // dataset fields for selected LAW fields
        CreateSelectedLawTags();     // tags for selected LAW tags
        _batchSize = GetMessageBatchSize();
    }
    catch (Exception ex)
    {
        // Surface the failure to the log worker and the director, then rethrow.
        LogMessage(false, ex.ToUserString());
        ReportToDirector(ex.ToUserString());
        throw;
    }
}
/// <summary>
/// Builds the list of documents to reprocess according to the selection mode:
/// explicitly selected ids, a DCN cross-reference file, a CSV keyed by document
/// set, or every document from the original job.
/// </summary>
/// <param name="inputFilePath">Path of the selection input file.</param>
/// <param name="selectionMode">How documents were selected for reprocessing.</param>
/// <param name="matterId">Matter identifier.</param>
/// <param name="datasetId">Dataset identifier (used for CSV selection).</param>
/// <param name="jobId">Original job identifier (used for the All mode).</param>
/// <param name="filters">Optional filter expression for the All mode.</param>
/// <returns>Documents to reprocess; empty when nothing matches.</returns>
public List<ReconversionDocumentBEO> GetDocumentsFromReprocessSelection(
    string inputFilePath, ReProcessJobSelectionMode selectionMode, long matterId,
    long datasetId, long jobId, string filters = null)
{
    var reprocessDocuments = new List<ReconversionDocumentBEO>();
    switch (selectionMode)
    {
        case ReProcessJobSelectionMode.Selected:
        {
            var selectedIds = ConversionReprocessStartupHelper.GetDocumentIdListFromFile(inputFilePath, Constants.DocId);
            reprocessDocuments.AddRange(
                ConversionReprocessStartupHelper.GetImportDocumentListForIDList(selectedIds, Constants.DocId, null, matterId));
            break;
        }
        case ReProcessJobSelectionMode.CrossReference:
        {
            var dcnIds = ConversionReprocessStartupHelper.GetDocumentIdListFromFile(inputFilePath, Constants.DCN);
            reprocessDocuments.AddRange(
                ConversionReprocessStartupHelper.GetImportDocumentListForIDList(dcnIds, Constants.DCN, _dataset.CollectionId, matterId));
            break;
        }
        case ReProcessJobSelectionMode.Csv:
            // CSV input groups DCNs by document set name; resolve each set's id.
            var idsPerDocumentSet = ConversionReprocessStartupHelper.GetDocumentIdListFromFile(
                inputFilePath, Constants.DCN, Constants.DocumentSetName);
            var documentSets = DataSetBO.GetAllDocumentSet(datasetId.ToString(CultureInfo.InvariantCulture));
            foreach (var documentSetName in idsPerDocumentSet.Keys)
            {
                var matchingSet = documentSets.FirstOrDefault(d => d.DocumentSetName.Equals(documentSetName));
                if (matchingSet == null)
                {
                    continue; // unknown set name in the CSV - skip its documents
                }
                reprocessDocuments.AddRange(
                    ConversionReprocessStartupHelper.GetImportDocumentListForIDList(
                        idsPerDocumentSet[documentSetName], Constants.DCN, matchingSet.DocumentSetId, matterId));
            }
            break;
        case ReProcessJobSelectionMode.All:
            reprocessDocuments.AddRange(
                ConversionReprocessStartupHelper.GetReconversionDocumentBeosForJobId(matterId, jobId, filters));
            break;
    }
    return reprocessDocuments;
}
/// <summary>
/// Deletes the dataset entry from EV Master and, when externalization is
/// enabled, removes the externalized document artifacts as well.
/// </summary>
/// <param name="task">Task carrying the extraction path and document sets.</param>
/// <param name="jobParameters">Job parameters identifying the dataset.</param>
private void DeleteDataSetFromEvMaster(DeleteDataSetTaskBEO task, DeleteDataSetJobBEO jobParameters)
{
    if (jobParameters == null)
    {
        return;
    }

    // Compose the dataset's EV Master uuid: matter-<id>-dataset-<id>-collection-<id>.
    string datasetUuid = string.Format("{0}{1}{2}{3}{4}{5}{6}{7}{8}{9}{10}",
        Constants.Matter, Constants.Hyphen, jobParameters.MatterId, Constants.Hyphen,
        Constants.Dataset, Constants.Hyphen, jobParameters.DataSetId, Constants.Hyphen,
        Constants.Collection, Constants.Hyphen, jobParameters.CollectionId);
    DataSetBO.DeleteDataSetFromEVMaster(datasetUuid);

    var externalizationEnabled = Convert.ToBoolean(
        CmgServiceConfigBO.GetServiceConfigurationsforConfig(Constants.ExternalizationConfiguration));
    if (externalizationEnabled)
    {
        DocumentBO.DeleteExternalization(jobParameters.MatterId, task.ExtractionPath, task.DocumentSets);
    }
}
/// <summary>
/// Creates an image document set under the profile's dataset collection.
/// </summary>
/// <param name="documentSet">Document set to create; its type and parent are set here.</param>
/// <returns>The new image set id, or an empty string when creation fails.</returns>
private string CreateDocumentSet(DocumentSetBEO documentSet)
{
    string imagesetid;
    try
    {
        documentSet.DocumentSetTypeId = Constants.ImageType;
        documentSet.ParentId = ProfileBEO.DatasetDetails.CollectionId;
        imagesetid = DataSetBO.CreateDocumentSet(documentSet);
    }
    catch (Exception ex)
    {
        // Creation failures are logged and reported back as an empty id.
        Tracer.Error("DcbSlicer: Failed to create image set. {0}", ex);
        imagesetid = String.Empty;
    }
    return imagesetid;
}
/// <summary>
/// Builds a DCB/Opticon import request from the given profile data and caches
/// the target dataset details on the worker.
/// </summary>
/// <param name="profiledata">Import profile supplied by the job.</param>
/// <returns>The populated import request.</returns>
private DcbOpticonJobBEO PopulateImportRequest(ProfileBEO profiledata)
{
    // FIX: the original assigned JobName twice; the duplicate assignment is removed.
    var request = new DcbOpticonJobBEO
    {
        JobTypeName = profiledata.ImportTypeName,
        JobName = profiledata.ImportJobName,
        SysDocId = profiledata.SysDocID,
        SysImportType = profiledata.SysImportTypeID,

        // Default broker/commit-interval settings.
        StatusBrokerType = BrokerType.Database,
        CommitIntervalBrokerType = BrokerType.ConfigFile,
        CommitIntervalSettingType = SettingType.CommonSetting,

        MatterName = profiledata.DatasetDetails.Matter.FolderName,
        DcbSourcePath = profiledata.Locations[0].ToString(CultureInfo.InvariantCulture),
        TargetDatasetId = profiledata.DatasetDetails.CollectionId,
        DatasetFolderId = profiledata.DatasetDetails.FolderID,

        // Field and content-field mappings.
        FieldMappings = profiledata.FieldMapping,
        ContentFields = profiledata.ContentFields,

        MatterId = profiledata.DatasetDetails.Matter.FolderID,
        IncludeTags = profiledata.IncludeAssociatedTags,
        IncludeNotes = profiledata.IncludeNotes,
        DcbCredentialList = profiledata.DcbUNPWs,
        NativeFilePath = profiledata.NativeFilePathField,
        ImageSetName = profiledata.ImageSetName,
        ImportImages = profiledata.IsImportImages,
        NewImageset = profiledata.IsNewImageSet,

        // Family relationship options.
        IsImportFamilies = profiledata.IsImportFamilyRelations,
        FamilyRelations = profiledata.FamilyRelations
    };

    // Cache the target dataset details for later use by this worker.
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(request.DatasetFolderId);
    return request;
}
/// <summary>
/// Worker startup for bulk print: deserializes the request from the boot
/// parameters and resolves the mapped printer, source location, job run id,
/// and dataset details.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    var xmlStream = new XmlSerializer(typeof(BulkPrintServiceRequestBEO));
    // FIX: dispose the StringReader; the original leaked it.
    using (var stream = new StringReader(BootParameters))
    {
        // Deserialize the boot parameters into the bulk print request.
        bulkPrintServiceRequestBEO = (BulkPrintServiceRequestBEO)xmlStream.Deserialize(stream);
    }

    sharedLocation = bulkPrintServiceRequestBEO.FolderPath;
    mappedPrinter = PrinterManagementBusiness.GetMappedPrinter(
        new MappedPrinterIdentifierBEO(
            bulkPrintServiceRequestBEO.Printer.UniqueIdentifier.Split(Constants.Split).Last(), true));
    sourceLocation = Path.Combine(
        Path.Combine(sharedLocation, bulkPrintServiceRequestBEO.Name), Constants.SourceDirectoryPath);
    jobRunId = (!String.IsNullOrEmpty(PipelineId)) ? Convert.ToInt32(PipelineId) : 0;

    // Dataset details for the request's collection id.
    _mDataSet = DataSetBO.GetDataSetDetailForCollectionId(bulkPrintServiceRequestBEO.DataSet.CollectionId);
}
/// <summary>
/// Gets dataset details and, when available, attaches matter and search-server
/// information to the returned entity.
/// </summary>
/// <param name="datasetId">Dataset identifier.</param>
/// <param name="matterId">Matter identifier.</param>
/// <returns>The dataset; its Matter/SearchServer are populated when resolvable.</returns>
internal static DatasetBEO GetDatasetDetails(long datasetId, string matterId)
{
    // FIX: removed the redundant Convert.ToInt64 - datasetId is already a long.
    var dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
    var matterDetails = MatterDAO.GetMatterDetails(matterId);
    if (matterDetails == null)
    {
        return dataset;
    }
    dataset.Matter = matterDetails;
    var searchServerDetails = ServerDAO.GetSearchServer(matterDetails.SearchServer.Id);
    if (searchServerDetails != null)
    {
        dataset.Matter.SearchServer = searchServerDetails;
    }
    return dataset;
}
/// <summary>
/// Initializes worker state: job parameters, dataset, selected-document count,
/// initial job-log entry, batch size and the document source collection.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();

    var datasetId = Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
    _totalDocumentCount = _analyticProject.GetSelectedDocumentsCount(_dataset.CollectionId, _jobParameter, WorkAssignment.JobId);

    // Record the job's initial state (0 of N documents processed).
    var jobSummaryKeyValuePairs = new EVKeyValuePairs();
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, jobSummaryKeyValuePairs);

    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;

    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Initializes worker state: job parameters, dataset, project field id, batch
/// size, and - for add-additional-documents runs - the include job ids.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();

    var datasetId = Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);

    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;

    // Include job ids are only needed when adding additional documents and
    // when they have not already been resolved.
    if (!_jobParameter.IsAddAdditionalDocuments || !string.IsNullOrEmpty(_jobIds))
    {
        return;
    }
    _jobIds = GetIncludeJobIds();
}
/// <summary>
/// Finalizes the job: writes total/failed document counts back into the job's
/// boot parameters and deletes the reconversion input file.
/// </summary>
protected override void EndWork()
{
    var reprocessParams = GetBootObject<ConversionReprocessJobBeo>(BootParameters);
    int jobId = WorkAssignment.JobId;

    // Resolve the matter that owns the dataset so we can query its vault.
    DatasetBEO dataset = DataSetBO.GetDataSetDetailForDataSetId(reprocessParams.DatasetId);
    long matterId = dataset.Matter.FolderID;
    var vault = VaultRepository.CreateRepository(matterId);

    // Collect the reconversion outcome counts for this job.
    int totalDocCount;
    int failedDocCount;
    int succeedDocCount;
    vault.GetReconversionDocStatusCount(jobId, out totalDocCount, out succeedDocCount, out failedDocCount);
    reprocessParams.TotalDocCount = totalDocCount;
    reprocessParams.FailedDocCount = failedDocCount;

    // Re-serialize the updated boot parameters.
    string updatedBootParam;
    var serializer = new XmlSerializer(typeof(ConversionReprocessJobBeo));
    using (var writer = new StringWriter())
    {
        serializer.Serialize(writer, reprocessParams);
        updatedBootParam = writer.ToString();
    }
    ReconversionDAO.UpdateReconversionBootParamter(jobId, updatedBootParam);

    // Remove the input file listing the documents that were converted.
    SafeDeleteFile(reprocessParams.FilePath);
}
/// <summary>
/// Worker startup for analytical-index creation: loads job parameters and the
/// dataset, records the initial job state, resolves or generates the analytics
/// index id, and requests index creation in the analytics subsystem.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo)XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectInSubSystemJobBatchSize", "AnalyticsProject"));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.CurrentCulture));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    // Documents are counted per the primary system job's task id.
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCountByTaskId(
        Convert.ToInt64(_jobParameter.MatterId, CultureInfo.CurrentCulture),
        _jobParameter.ProjectCollectionId, _jobParameter.PrimarySystemJobId);
    // Update job log initial state (0 of N documents processed).
    var jobSummaryKeyValuePairs = new EVKeyValuePairs();
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, jobSummaryKeyValuePairs);
    // Rerun job or Add additional documents - need to get the existing IndexId, if already created.
    if (_jobParameter.IsRerunJob || _jobParameter.IsAddAdditionalDocuments)
    {
        _indexId = AnalyticsProject.GetIndexIdForProject(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, false);
    }
    // No existing index id (fresh run, or lookup above returned nothing):
    // generate a new one and persist it before creating the index.
    if (string.IsNullOrEmpty(_indexId))
    {
        _indexId = "idx-" + Guid.NewGuid().ToString().ToLowerInvariant();
        _analyticProject.InsertIndexId(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, _indexId);
    }
    AnalyticsProject.CreateAnalyticalIndex(_jobParameter.MatterId, WorkAssignment.JobId, _indexId); // Create Index in Spark SVM.
    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Gets dataset details and, when available, attaches matter and search-server
/// information to the returned entity.
/// </summary>
/// <param name="datasetId">Dataset identifier.</param>
/// <param name="matterId">Matter identifier.</param>
/// <returns>The dataset; its Matter/SearchServer are populated when resolvable.</returns>
private DatasetBEO GetDatasetDetails(long datasetId, long matterId)
{
    // FIX: removed the redundant Convert.ToInt64 - datasetId is already a long.
    var dataset = DataSetBO.GetDataSetDetailForDataSetId(datasetId);
    dataset.ShouldNotBe(null);
    var matterDetails = MatterDAO.GetMatterDetails(matterId.ToString(CultureInfo.InvariantCulture));
    if (matterDetails == null)
    {
        return dataset;
    }
    dataset.Matter = matterDetails;
    var searchServerDetails = ServerDAO.GetSearchServer(matterDetails.SearchServer.Id);
    if (searchServerDetails != null)
    {
        dataset.Matter.SearchServer = searchServerDetails;
    }
    return dataset;
}
/// <summary>
/// Resolves the display location of a bulk tag job: the review set name when
/// the job targets a review set, otherwise the dataset's folder name.
/// </summary>
/// <param name="bulkTagJobBeo">Bulk tag job details.</param>
/// <returns>The location name, or an empty string when it cannot be resolved.</returns>
private static string GetTaggingLocation(BulkTagJobBusinessEntity bulkTagJobBeo)
{
    var location = string.Empty;
    var searchContext = bulkTagJobBeo.DocumentListDetails.SearchContext;

    if (!string.IsNullOrEmpty(searchContext.ReviewSetId))
    {
        // Job targets a specific review set - use its name.
        var reviewSetDetails = ReviewSetBO.GetReviewSetDetails(
            searchContext.MatterId.ToString(CultureInfo.InvariantCulture),
            searchContext.ReviewSetId);
        if (reviewSetDetails != null)
        {
            location = reviewSetDetails.ReviewSetName;
        }
    }
    else
    {
        // No review set - fall back to the dataset's folder name.
        var dataSetDetails = DataSetBO.GetDataSetDetailForDataSetId(searchContext.DataSetId);
        if (dataSetDetails != null)
        {
            location = dataSetDetails.FolderName;
        }
    }
    return location;
}
/// <summary>
/// Absorbs the boot parameters, validates them, determines the qualifying
/// document count for the reviewset query, and prepares the final document
/// query (with output fields) for the Search Worker.
/// </summary>
/// <param name="bootParameter">Serialized boot parameters for the job.</param>
/// <exception cref="ApplicationException">When the search returns no documents.</exception>
public void DoBeginWork(string bootParameter)
{
    bootParameter.ShouldNotBeEmpty();
    // Deserialize and determine the boot object.
    _bootObject = GetBootObject(bootParameter);
    // Assert condition to check for job scheduled by.
    _bootObject.CreatedByGUID.ShouldNotBeEmpty();
    // Get dataset details to know the collection id and the matter id details.
    _datasetEntity = DataSetBO.GetDataSetDetailForDataSetId(_bootObject.datasetId);
    _bootObject.BinderFolderId.ShouldNotBe(0);
    _binderEntity = BinderBO.GetBinderDetails(_bootObject.BinderFolderId.ToString());
    _binderEntity.ShouldNotBe(null);
    // Assert conditions to check the dataset details.
    _datasetEntity.ShouldNotBe(null);
    _datasetEntity.Matter.ShouldNotBe(null);
    _reviewSetRecord = ConvertToReviewSetRecord(_bootObject, _datasetEntity);
    // Construct the document query entity to determine the total documents (page size 1: count only).
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0, 1, null);
    _docQueryEntity.TransactionName =
        _docQueryEntity.QueryObject.TransactionName = "ReviewsetStartupWorker - DoBeginWork (GetCount)";
    // Mock the user session (workers run outside a web request).
    MockSession();
    var reviewSetDetails = ReviewSetBO.GetReviewSetDetails(
        _datasetEntity.Matter.FolderID.ToString(), _bootObject.ReviewSetId);
    if (reviewSetDetails != null)
    {
        reviewSetDetails.Action = _reviewSetRecord.Activity;
        reviewSetDetails.BinderName = _binderEntity.BinderName;
        // Audit logging for the existing review set.
        ReviewSetBO.UpdateReviewSet(reviewSetDetails, false, false);
    }
    // Retrieve the total documents qualified by the query.
    _totalDocumentCount = ReviewerSearchInstance.GetDocumentCount(_docQueryEntity.QueryObject);
    Tracer.Info("Split Reviewset Startup Worker : {0} matching documents determined for the requested query",
        _totalDocumentCount);
    if (_totalDocumentCount < 1)
    {
        var message = String.Format("Search engine does not return any documents for Reviewset {0}",
            _reviewSetRecord.ReviewSetName);
        throw new ApplicationException(message);
    }
    // Construct the document query entity to write the resultant documents to an xml file.
    var outputFields = new List<Field>();
    outputFields.AddRange(new List<Field>
    {
        new Field { FieldName = EVSystemFields.FamilyId },
        new Field { FieldName = EVSystemFields.DocumentKey },
        new Field { FieldName = EVSystemFields.ReviewSetId },
        new Field { FieldName = EVSystemFields.DuplicateId },
        new Field { FieldName = EVSystemFields.Tag.ToLower() },
        new Field { FieldName = _datasetEntity.DocumentControlNumberName }
    });
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0, Convert.ToInt32(_totalDocumentCount), outputFields);
}
/// <summary>
/// Processes one pipe message: resolves the documents for the reviewset (from
/// the database for All/Tag logic, otherwise via search) and forwards them in
/// batches. Failures are reported and logged but not rethrown.
/// </summary>
/// <param name="envelope">Pipe message whose body is a ReviewsetSearchRecord.</param>
protected override void ProcessMessage(PipeMessageEnvelope envelope)
{
    var searchRecord = (ReviewsetSearchRecord)envelope.Body;
    // Assert checks on the incoming record.
    searchRecord.ShouldNotBe(null);
    searchRecord.QueryEntity.ShouldNotBe(null);
    searchRecord.ReviewsetDetails.ShouldNotBe(null);
    try
    {
        // Initialize config values.
        GetConfigurationValues();
        searchRecord.ReviewsetDetails.CreatedBy.ShouldNotBeEmpty();
        _createdBy = searchRecord.ReviewsetDetails.CreatedBy;
        DocumentRecordCollection reviewsetDetail;
        // Convert the ReviewsetSearchRecord to DocumentRecordCollection type.
        ConvertReviewsetSearchRecordToDocumentRecordCollection(searchRecord, out reviewsetDetail);
        _dataset = DataSetBO.GetDataSetDetailForDataSetId(searchRecord.ReviewsetDetails.DatasetId);
        var documents = new List<DocumentIdentityRecord>();
        var reviewsetLogic = searchRecord.ReviewsetDetails.ReviewSetLogic.ToLower();
        if (reviewsetLogic == "all" || reviewsetLogic == "tag")
        {
            // All/Tag logic: pull documents straight from the database.
            // Quotes are stripped from the search query before matching.
            var searchQuery = !string.IsNullOrEmpty(_jobParameter.SearchQuery) ?
                _jobParameter.SearchQuery.Replace("\"", "") : string.Empty;
            Tracer.Info("Get documents from database to create reviewset is started for All/Tag options - job run id : {0}",
                PipelineId);
            var resultDocuments = DocumentBO.GetDocumentsForCreateReviewsetJob(
                searchRecord.QueryEntity.QueryObject.MatterId, _dataset.CollectionId,
                searchRecord.TotalDocumentCount, reviewsetLogic, searchQuery.ToLower(), _batchSize);
            documents.AddRange(resultDocuments.Select(resultDocument => new DocumentIdentityRecord
            {
                Id = resultDocument.Id,
                DocumentId = resultDocument.DocumentID,
                FamilyId = resultDocument.FamilyID,
                DuplicateId = resultDocument.DuplicateId
            }));
            Tracer.Info("Documents retrieved from database to create review set for All/Tag options - job run id : {0}",
                PipelineId);
        }
        else
        {
            // Any other logic: resolve documents via the search record.
            documents = GetDocuments(searchRecord);
        }
        if (documents == null || !documents.Any())
        {
            Tracer.Error("No documents found for the job run id : {0}", PipelineId);
            LogMessage(false, string.Format("No documents found for the job run id : {0}", PipelineId),
                _createdBy, searchRecord.ReviewsetDetails.ReviewSetName);
            return;
        }
        Tracer.Info("Total of {0} documents found for the job run id : {1}", documents.Count.ToString(), PipelineId);
        LogMessage(true, string.Format("Total of {0} documents found for the job run id : {1}", documents.Count, PipelineId),
            _createdBy, searchRecord.ReviewsetDetails.ReviewSetName);
        // Group the results and send them downstream in batches.
        GroupDocumentsAndSend(documents, reviewsetDetail);
    }
    catch (Exception ex)
    {
        // Report, trace and log the failure; message processing continues.
        ReportToDirector(ex);
        ex.Trace().Swallow();
        LogMessage(false, ex.ToUserString(), searchRecord.ReviewsetDetails.CreatedBy,
            searchRecord.ReviewsetDetails.ReviewSetName);
    }
}
/// <summary>
/// Assigns sequential document control numbers (DCNs) to the given load file
/// records: reads the dataset's last DCN, reserves a new range by writing the
/// would-be last DCN back to the dataset, then stamps each record.
/// </summary>
/// <param name="records">Records to receive DCNs.</param>
/// <exception cref="Exception">When the current DCN's numeric part cannot be parsed.</exception>
private void AssignDocumentControlNumber(List<LoadFileRecord> records)
{
    long numericPartOfDcn = 0;

    #region Delegate - logic to get complete DCN readily, given the numeric value as input
    // Builds a full DCN (prefix + zero padding + numeric part) from the numeric part.
    Func<string, string> getDcn = delegate(string newLastDcnNumericPart)
    {
        var padString = string.Empty;
        // Pad zeros so the numeric part is at least DCNStartWidth wide.
        if (newLastDcnNumericPart.Length < m_Dataset.DCNStartWidth.Length)
        {
            var numberOfZerosTobePadded = m_Dataset.DCNStartWidth.Length - newLastDcnNumericPart.Length;
            for (var i = 0; i < numberOfZerosTobePadded; i++)
            {
                padString += "0";
            }
        }
        return (m_Dataset.DCNPrefix + padString + newLastDcnNumericPart);
    };
    #endregion Delegate - logic to get complete DCN readily, given the numeric value as input

    using (var lowerTransScope = new EVTransactionScope(TransactionScopeOption.Suppress))
    {
        m_CurrentDcn = DataSetBO.GetLastDocumentControlNumber(m_Dataset.FolderID);
        // If DCN is not obtained, no documents were imported for the dataset till now,
        // so set current DCN to the first DCN value.
        if (string.IsNullOrWhiteSpace(m_CurrentDcn))
        {
            m_CurrentDcn = m_Dataset.DCNPrefix + Constants.StringZero;
        }
        else
        {
            // A stored DCN without the prefix is assumed numeric-only - TODO confirm;
            // it is decremented by one and re-prefixed before use.
            if (!m_CurrentDcn.Contains(m_Dataset.DCNPrefix))
            {
                var currentNumber = Convert.ToInt32(m_CurrentDcn);
                currentNumber = currentNumber - 1;
                m_CurrentDcn = currentNumber.ToString();
                m_CurrentDcn = m_Dataset.DCNPrefix + m_CurrentDcn;
            }
        }
        // 1) Get last DCN from EVMaster DB and 2) pick its numeric part.
        // Throws if the numeric part could not be retrieved.
        if (IsNumeric(m_CurrentDcn.Substring(m_Dataset.DCNPrefix.Length), out numericPartOfDcn))
        {
            // Update the new last DCN after bulk add, assuming the bulk add will succeed.
            // getDcn takes the numeric part of the WOULD-BE DCN value as input and returns
            // the complete DCN, ready to be written back to the dataset table.
            m_NewDcn = getDcn((numericPartOfDcn + records.Count()).ToString(CultureInfo.InvariantCulture));
            DataSetBO.UpdateLastDocumentControlNumber(m_Dataset.FolderID, m_NewDcn);
            lowerTransScope.Complete();
        }
        else
        {
            throw new Exception(ErrorCodes.InvalidDCNValueObtainedForDataset);
        }
    }

    #region Assign DCN to all documents
    // Stamp each record with the next DCN in the reserved range.
    var dCNIncrementalCounter = numericPartOfDcn;
    records.ForEach(p =>
    {
        dCNIncrementalCounter += 1;
        p.DocumentControlNumber = getDcn(dCNIncrementalCounter.ToString(CultureInfo.InvariantCulture));
    });
    #endregion
}
/// <summary>
/// Generates DataSet delete tasks: one batch-delete task per window of
/// documents, one task per non-native document set (production/image sets),
/// one task per native document set, and a final cleanup task carrying the
/// extraction path and full document-set list.
/// </summary>
/// <param name="jobParameters">DataSet delete job parameters.</param>
/// <param name="previouslyCommittedTaskCount">Out: number of tasks already committed.</param>
/// <returns>The list of generated delete tasks, or null on failure.</returns>
protected override Tasks<DeleteDataSetTaskBEO> GenerateTasks(DeleteDataSetJobBEO jobParameters,
    out int previouslyCommittedTaskCount)
{
    Tasks<DeleteDataSetTaskBEO> tasks = null;
    previouslyCommittedTaskCount = 0;
    try
    {
        // Message that GenerateTasks was called.
        LogMessage(String.Format(Constants.JobGenerateTasksInitialized, jobParameters.JobId), false,
            LogCategory.Job, null);
        EvLog.WriteEntry(String.Format(Constants.JobGenerateTasksInitialized, jobParameters.JobId),
            String.Format(Constants.JobGenerateTasksInitialized, jobParameters.JobId),
            EventLogEntryType.Information);
        var docCount = MatterBO.GetDocumentCount(Convert.ToInt64(jobParameters.MatterId),
            new List<string>() { jobParameters.CollectionId });
        var dataSetDocuments = new List<ReIndexDocumentBEO>();
        if (docCount > 0)
        {
            // Number of pages needed to read all documents in chunks of m_ReadChunkSize.
            var nMessages = (Int32)(Math.Ceiling((double)docCount / m_ReadChunkSize));
            var processed = 0;
            // Loop through and fetch the documents page by page.
            for (var pageIdx = 1; pageIdx <= nMessages; pageIdx++)
            {
                var pgSize = 0;
                // Determine the page size and processed count: the last page
                // holds the remainder, every other page holds a full chunk.
                if (nMessages == 1)
                {
                    pgSize = (Int32)docCount;
                }
                else if (nMessages > 1 && pageIdx == nMessages)
                {
                    pgSize = (Int32)docCount - processed;
                }
                else
                {
                    pgSize = m_ReadChunkSize;
                }
                var batchDocuments = MatterBO.GetCollectionDocuments(Convert.ToInt64(jobParameters.MatterId),
                    pageIdx, m_ReadChunkSize, new List<string>() { jobParameters.CollectionId });
                if (batchDocuments != null && batchDocuments.Any())
                {
                    dataSetDocuments.AddRange(batchDocuments);
                }
                processed += pgSize;
            }
        }
        // Get matter details for the matter id.
        var matterDetail = MatterServices.GetMatterDetails(jobParameters.MatterId);
        var dataSetDetail = DataSetBO.GetDataSetDetailForDataSetId(jobParameters.DataSetId);
        // Get all document sets for the dataset id.
        var lstDocumentSet = DataSetService.GetAllDocumentSet(jobParameters.DataSetId.ToString());
        tasks = GetTaskList<DeleteDataSetJobBEO, DeleteDataSetTaskBEO>(jobParameters);
        previouslyCommittedTaskCount = tasks.Count;
        // Total tasks = document batches + one per document set + one final cleanup task.
        var documentTaskCount = (Int32)(Math.Ceiling((double)dataSetDocuments.Count / m_WindowSize));
        m_NumberOfTasks = documentTaskCount + lstDocumentSet.Count + 1;
        double taskPercent = (100.0 / m_NumberOfTasks);
        int taskNumber = 0;
        DeleteDataSetTaskBEO deleteDataSetTaskBeo;
        // Create tasks for the documents in groups of m_WindowSize.
        for (taskNumber = 0; taskNumber < documentTaskCount; taskNumber++)
        {
            deleteDataSetTaskBeo = new DeleteDataSetTaskBEO
            {
                TaskNumber = taskNumber + 1,
                TaskComplete = false,
                TaskPercent = taskPercent,
                DataSetId = jobParameters.DataSetId,
                DataSetName = jobParameters.DataSetName,
                DocumentSetId = jobParameters.CollectionId,
                DeletedBy = jobParameters.DeletedBy,
                DocumentId = dataSetDocuments.GetRange(taskNumber * m_WindowSize,
                        Math.Min((dataSetDocuments.Count - (taskNumber * m_WindowSize)), m_WindowSize))
                    .Select(d => d.DocumentReferenceId)
                    .ToList(),
                MatterDBName = matterDetail.MatterDBName,
                IsDocumentDelete = true
            };
            tasks.Add(deleteDataSetTaskBeo);
        }
        // Create the tasks for the non-native document sets (production & image sets).
        var nonNativeSets = lstDocumentSet.Where(ds => ds.DocumentSetTypeId != "2");
        foreach (var docset in nonNativeSets)
        {
            taskNumber += 1;
            deleteDataSetTaskBeo = new DeleteDataSetTaskBEO
            {
                TaskNumber = taskNumber,
                TaskComplete = false,
                TaskPercent = taskPercent,
                DataSetId = jobParameters.DataSetId,
                DataSetName = jobParameters.DataSetName,
                DeletedBy = jobParameters.DeletedBy,
                DocumentId = new List<string>(),
                MatterDBName = matterDetail.MatterDBName,
                DocumentSetId = docset.DocumentSetId,
                DocumentSetTypeId = docset.DocumentSetTypeId,
                IsDocumentDelete = false
            };
            tasks.Add(deleteDataSetTaskBeo);
        }
        // Create the tasks for only the native document sets (type id "2").
        var nativeSet = lstDocumentSet.Where(ds => ds.DocumentSetTypeId == "2");
        foreach (var docset in nativeSet)
        {
            taskNumber += 1;
            deleteDataSetTaskBeo = new DeleteDataSetTaskBEO
            {
                TaskNumber = taskNumber,
                TaskComplete = false,
                TaskPercent = taskPercent,
                DataSetId = jobParameters.DataSetId,
                DataSetName = jobParameters.DataSetName,
                DeletedBy = jobParameters.DeletedBy,
                DocumentId = new List<string>(),
                MatterDBName = matterDetail.MatterDBName,
                DocumentSetId = docset.DocumentSetId,
                DocumentSetTypeId = docset.DocumentSetTypeId,
                IsDocumentDelete = false
            };
            tasks.Add(deleteDataSetTaskBeo);
        }
        // Final cleanup task: carries all document sets and the extraction path.
        taskNumber += 1;
        deleteDataSetTaskBeo = new DeleteDataSetTaskBEO
        {
            TaskNumber = taskNumber,
            TaskComplete = false,
            TaskPercent = 100,
            DataSetId = jobParameters.DataSetId,
            DataSetName = jobParameters.DataSetName,
            DeletedBy = jobParameters.DeletedBy,
            DocumentId = new List<string>(),
            DocumentSetId = jobParameters.CollectionId,
            MatterDBName = matterDetail.MatterDBName,
            IsDocumentDelete = false,
            DocumentSets = lstDocumentSet,
            ExtractionPath = dataSetDetail.CompressedFileExtractionLocation,
            DocumentSetTypeId = string.Empty
        };
        tasks.Add(deleteDataSetTaskBeo);
        // Renumber all tasks sequentially (including previously committed ones).
        for (int i = 1; i <= tasks.Count; i++)
        {
            tasks[i - 1].TaskNumber = i;
        }
    }
    catch (EVException ex)
    {
        LogToEventLog(ex, GetType(), MethodInfo.GetCurrentMethod().Name, jobParameters.JobId,
            jobParameters.JobRunId);
        HandleJobException(GetEvExceptionDescription(ex), null, ErrorCodes.ProblemInGenerateTasks);
    }
    catch (Exception ex)
    {
        // Handle exception in task generation.
        HandleJobException(String.Format(Constants.JobGenerateTasksException, jobParameters.JobId), ex,
            ErrorCodes.ProblemInGenerateTasks);
        LogMessage(ex, GetType(), MethodInfo.GetCurrentMethod().Name, EventLogEntryType.Error,
            jobParameters.JobId, jobParameters.JobRunId);
    }
    return tasks;
}
/// <summary>
/// Initializes the export-load-file job from the serialized boot parameter:
/// deserializes the job detail, validates and creates the export folder,
/// resolves the dataset, builds the <c>ExportOption</c>, reads the delimiter
/// settings, creates the load file (plus OPT/text helper files where
/// applicable), mocks the user session and finally builds the search query.
/// </summary>
/// <param name="bootParameter">Serialized ExportLoadJobDetailBEO job definition.</param>
private void Initialize(string bootParameter)
{
    _parametersExportLoadFile = GetExportBEO<ExportLoadJobDetailBEO>(bootParameter);

    #region Assertion
    _parametersExportLoadFile.ShouldNotBe(null);
    _parametersExportLoadFile.ExportLoadFileInfo.FilePath.ShouldNotBeEmpty();
    #endregion

    // Ensure the export destination exists and is writable before any file is created.
    Directory.CreateDirectory(_parametersExportLoadFile.ExportLoadFileInfo.FilePath);
    if (!Utils.CanWriteToFolder(_parametersExportLoadFile.ExportLoadFileInfo.FilePath))
    {
        Tracer.Error("ExportOption Startup Worker: Invalid export path for job run id:{0}", PipelineId);
        LogMessage(false, Constants.ExportPathInvalid);
        throw new EVException().AddUsrMsg(Constants.ExportPathInvalid);
    }

    // NOTE(review): redundant null checks — _parametersExportLoadFile was already
    // asserted non-null and dereferenced above; this guard can never be false here.
    if (_parametersExportLoadFile != null && _parametersExportLoadFile.ExportLoadFileInfo != null)
    {
        #region Get Dataset Details
        if (!string.IsNullOrEmpty(_parametersExportLoadFile.DatasetId) &&
            !string.IsNullOrEmpty(_parametersExportLoadFile.MatterId))
        {
            _dataset = DataSetBO.GetDatasetDetailsWithMatterInfo(
                Convert.ToInt64(_parametersExportLoadFile.DatasetId),
                _parametersExportLoadFile.MatterId);

            #region Assertion
            _dataset.ShouldNotBe(null);
            #endregion

            // NOTE(review): if ShouldNotBe(null) throws on null, this branch is dead
            // code — confirm the assertion's semantics; otherwise the error is never logged.
            if (_dataset == null)
            {
                Tracer.Error("ExportOption Startup Worker: Cannot get dataset details for job run id: {0}",
                    PipelineId);
                // TODO: Throw appropriate exception after analysis.
            }
        }
        #endregion

        #region Setup ExportOption Options
        // NOTE(review): IsNative / IncludeNativeTagName / IsText null-guard
        // ExportLoadFileOption, but TextOption1/TextOption2/FieldForNativeFileName/
        // FieldForTextFileName/IsTextFieldToExportSelected/TextFieldToExport below
        // dereference it unconditionally — NullReferenceException if it can really
        // be null. Verify the guard is either unnecessary or must be extended.
        _exportOption = new ExportOption
        {
            IsNative = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption != null &&
                       _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.IncludeNativeFile,
            IncludeNativeTagName = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption != null
                ? _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.TagToIncludeNative
                : string.Empty,
            IsText = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption != null &&
                     _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.IncludeTextFile,
            IsField = _parametersExportLoadFile.ExportLoadFields != null &&
                      _parametersExportLoadFile.ExportLoadFields.Count > 0,
            IsTag = _parametersExportLoadFile.ExportLoadTagInfo != null &&
                    _parametersExportLoadFile.ExportLoadTagInfo.IncludeTag,
            // Normalize forward slashes and strip the trailing backslash from the destination.
            ExportDestinationFolderPath = _parametersExportLoadFile.ExportLoadFileInfo.FilePath != null
                ? _parametersExportLoadFile.ExportLoadFileInfo.FilePath.Replace(@"/", @"\")
                    .TrimEnd(new[] { '\\' })
                : string.Empty,
            TextOption1 = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.TextOption1,
            TextOption2 = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.TextOption2,
            FieldForNativeFileName = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.FieldForNativeFileName,
            // When OPT-based naming is selected and an image file name is configured,
            // the text file name follows the image file name instead of the field.
            FieldForTextFileName =
                _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.Nameselection == TextFileNameSelection.UseOPT &&
                !string.IsNullOrEmpty(
                    _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.ImageFileName)
                    ? _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.ImageFileName
                    : _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.FieldForTextFileName,
            IsTextFieldToExportSelected = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.IsTextFieldToExportSelected,
            TextFieldToExport = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.TextFieldToExport
        };

        // Production-set and image-set exports share the ProdImgCollectionId source.
        switch (_parametersExportLoadFile.ExportLoadFileInfo.PriImgSelection)
        {
            case SetSelection.ProductionSet:
                _exportOption.IsProduction = true;
                _exportOption.ProductionSetCollectionId = _parametersExportLoadFile.ExportLoadFileInfo.ProdImgCollectionId;
                break;
            case SetSelection.ImageSet:
                _exportOption.IsImage = true;
                _exportOption.ImageSetCollectionId = _parametersExportLoadFile.ExportLoadFileInfo.ProdImgCollectionId;
                break;
        }
        if (_parametersExportLoadFile.ExportLoadTagInfo != null &&
            _parametersExportLoadFile.ExportLoadTagInfo.IncludeTag)
        {
            _exportOption.TagList = _parametersExportLoadFile.ExportLoadTagInfo.TagList;
        }
        #endregion

        #region Get delimiters
        // NOTE(review): Convert.ToInt32(string) throws FormatException for an empty
        // string (and returns 0 for null), so these conversions fail BEFORE the
        // IsNullOrEmpty guard below can log — the validation should precede the
        // conversion. Confirm and reorder.
        var columnDelimiter = Convert.ToChar(
            Convert.ToInt32(_parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileFormat.Column));
        var quoteCharacter = Convert.ToChar(
            Convert.ToInt32(_parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileFormat.Quote));
        if (string.IsNullOrEmpty(_parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileFormat.Column) ||
            string.IsNullOrEmpty(_parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileFormat.Quote) ||
            string.IsNullOrEmpty(_parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileFormat.NewLine) ||
            (_exportOption.IsTag &&
             string.IsNullOrEmpty(_parametersExportLoadFile.ExportLoadTagInfo.Delimeter)))
        {
            Tracer.Info(
                "ExportOption Startup Worker: One or more delimiters are null or empty for job run id:{0}",
                PipelineId);
        }
        #endregion

        #region Create files
        // Fully qualified load file name: <FilePath>\<FileName>.<FileExtension>
        _loadFileFullyQualifiedName = _parametersExportLoadFile.ExportLoadFileInfo.FilePath + @"\" +
                                      _parametersExportLoadFile.ExportLoadFileInfo.FileName + "." +
                                      _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileFormat.
                                          FileExtension;
        var encoding = _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileFormat.EncodingType ==
                       EncodingTypeSelection.Ansi
            ? Encoding.GetEncoding(Constants.Ansi)
            : Encoding.Unicode;
        try
        {
            Tracer.Info("Export Load File path = {0}", _loadFileFullyQualifiedName);
            CreateLoadFileWithHeader(_loadFileFullyQualifiedName, columnDelimiter, quoteCharacter, encoding);
        }
        catch (Exception)
        {
            // Log for the job report, then let the pipeline see the original exception.
            LogMessage(false, Constants.FailureInCreateLoadFile);
            throw;
        }
        _exportOption.LoadFilePath = _loadFileFullyQualifiedName;

        // OPT image helper file is needed for production-set or image-set exports.
        if (_parametersExportLoadFile.ExportLoadFileInfo.PriImgSelection == SetSelection.ProductionSet ||
            _parametersExportLoadFile.ExportLoadFileInfo.PriImgSelection == SetSelection.ImageSet)
        {
            _imageHelperFileName = _parametersExportLoadFile.ExportLoadFileInfo.FilePath +
                                   Constants.BackSlash +
                                   _parametersExportLoadFile.ExportLoadFileInfo.FileName +
                                   Constants.OptFileExtension;
            _exportOption.LoadFileImageHelperFilePath = _imageHelperFileName;
        }

        // Text helper file only applies when text files are included AND named via OPT.
        if (_parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.IncludeTextFile &&
            _parametersExportLoadFile.ExportLoadFileInfo.ExportLoadFileOption.Nameselection ==
            TextFileNameSelection.UseOPT)
        {
            _contentHelperFileName = _parametersExportLoadFile.ExportLoadFileInfo.FilePath +
                                     Constants.TextHelperFileName;
            _exportOption.LoadFileTextHelperFilePath = _contentHelperFileName;
        }
        #endregion

        #region Assertion
        _parametersExportLoadFile.CreatedBy.ShouldNotBeEmpty();
        #endregion

        #region Set User
        if (!string.IsNullOrEmpty(_parametersExportLoadFile.CreatedBy))
        {
            _createdBy = _parametersExportLoadFile.CreatedBy;
        }
        else
        {
            // NOTE(review): like the dataset check above, this branch is unreachable
            // if ShouldNotBeEmpty() throws — confirm.
            Tracer.Error(
                "ExportOption Startup Worker: Job created by user id not specified in boot parameters for job run id:{0}",
                PipelineId);
            //TODO: throw appropriate exception after analysis.
        }
        MockSession();
        #endregion

        BuildSearchQueryForExportLoadFile();
    }
}
/// <summary>
/// Absorbs the serialized boot parameters, resolves the dataset and binder the
/// reviewset belongs to, determines how many documents qualify, and prepares
/// the document query that feeds the Search Worker.
/// </summary>
/// <param name="bootParameter">Serialized boot object for the reviewset job.</param>
public void DoBeginWork(string bootParameter)
{
    bootParameter.ShouldNotBeEmpty();

    // Materialize the boot object and validate its mandatory members.
    _bootObject = GetBootObject(bootParameter);
    _bootObject.JobScheduleCreatedBy.ShouldNotBeEmpty();
    _bootObject.BinderFolderId.ShouldNotBe(0);

    // Dataset details supply the collection id and matter id used downstream.
    _datasetEntity = DataSetBO.GetDataSetDetailForDataSetId(_bootObject.datasetId);
    _datasetEntity.ShouldNotBe(null);

    _binderEntity = BinderBO.GetBinderDetails(_bootObject.BinderFolderId.ToString());
    _binderEntity.ShouldNotBe(null);

    _reviewSetRecord = ConvertToReviewSetRecord(_bootObject);

    // A minimal (single-row) query entity is enough to obtain the total count.
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0, 1, null);

    // Mock the user session before any search call is made.
    MockSession();
    _docQueryEntity.TransactionName =
        _docQueryEntity.QueryObject.TransactionName = "ReviewsetStartupWorker - DoBeginWork (GetCount)";

    var logicKind = _reviewSetRecord.ReviewSetLogic.ToLower();
    switch (logicKind)
    {
        case "all":
        case "tag":
            // "all"/"tag" logic counts via the database; quotes are stripped from the query.
            string strippedQuery;
            if (string.IsNullOrEmpty(_bootObject.SearchQuery))
            {
                strippedQuery = string.Empty;
            }
            else
            {
                strippedQuery = _bootObject.SearchQuery.Replace("\"", "");
            }
            _totalDocumentCount = DocumentBO.GetDocumentCountForCreateReviewsetJob(
                _datasetEntity.Matter.FolderID,
                _datasetEntity.CollectionId,
                logicKind,
                strippedQuery);
            break;
        default:
            // Any other logic asks the search server for the qualified-document count.
            _totalDocumentCount = ReviewerSearchInstance.GetDocumentCount(_docQueryEntity.QueryObject);
            break;
    }

    Tracer.Info("Reviewset Startup Worker : {0} matching documents determined for the requested query",
        _totalDocumentCount);

    if (_totalDocumentCount < 1)
    {
        throw new ApplicationException(
            String.Format("Search server does not return any documents for the reviewset '{0}'",
                _reviewSetRecord.ReviewSetName));
    }

    LogMessage(true, string.Format("{0} documents are qualified", _totalDocumentCount));

    // Rebuild the query entity to fetch every qualified document with the
    // fields needed to write the reviewset xml (DCN, family id, duplicate id).
    var resultFields = new List<Field>
    {
        new Field { FieldName = EVSystemFields.DcnField },
        new Field { FieldName = EVSystemFields.FamilyId },
        new Field { FieldName = EVSystemFields.DuplicateId }
    };
    _docQueryEntity = GetQueryEntity(_bootObject, _datasetEntity, 0,
        Convert.ToInt32(_totalDocumentCount), resultFields);
}
/// <summary>
/// Builds the document collection for a reconversion run by loading the
/// original job's configuration and resolving, per base job type, the
/// collection/dataset and the list of documents to reprocess.
/// </summary>
/// <returns>A populated <c>ConversionDocCollection</c> for the reconversion job.</returns>
public ConversionDocCollection GetReconversionDocCollection( )
{
    var docs = new ConversionDocCollection();
    // collection id to be used in reconversion; filled in per base-job type below.
    // NOTE(review): if JobTypeId is none of 9/14/2/8/35 this stays "" and the
    // dataset lookup below runs with an empty collection id — confirm that case
    // cannot occur or guard against it.
    string collectionId = "";
    // populate job info
    docs.JobConfig = BootObject;
    BaseJobBEO baseConfig = ReconversionDAO.GetJobConfigInfo(Convert.ToInt32(BootObject.OrginialJobId));
    docs.BaseJobTypeId = baseConfig.JobTypeId;
    // different type of base job has different object to hold job config info
    if (baseConfig.JobTypeId == 9) // Base job is production job
    {
        docs.BaseJobConfig = GetBootObject<ProductionDetailsBEO>(baseConfig.BootParameters);
        // for production reconversion, the collection id is the production set's
        // OriginalCollectionId, i.e. the native set collection id
        collectionId = ((ProductionDetailsBEO)docs.BaseJobConfig).OriginalCollectionId; //this is the native set collectionId
        // dataset associated with the document set
        docs.DataSet = DataSetBO.GetDataSetDetailForCollectionId(collectionId);
        // matter id associated with the document set
        long matterId = docs.DataSet.Matter.FolderID;
        // get the list of production documents to be reprocessed
        var helper = new ConversionReprocessStartupHelper();
        IEnumerable<ReconversionProductionDocumentBEO> pDocs = helper.GetProductionDocumentList(
            BootObject.FilePath,
            BootObject.JobSelectionMode, matterId,
            docs.BaseJobConfig as ProductionDetailsBEO,
            docs.DataSet.RedactableDocumentSetId,
            Convert.ToInt32(BootObject.OrginialJobId),
            BootObject.Filters);
        // cast back to the list of the parent class
        if (pDocs != null && pDocs.Any())
        {
            docs.Documents = pDocs.Cast<ReconversionDocumentBEO>().ToList();
        }
    }
    else
    {
        if (baseConfig.JobTypeId == 14) // load file import
        {
            docs.BaseJobConfig = GetBootObject<ImportBEO>(baseConfig.BootParameters);
            // for import reconversion, the collection id is the native document set collection id
            collectionId = ((ImportBEO)docs.BaseJobConfig).CollectionId;
        }
        else if (baseConfig.JobTypeId == 2 || baseConfig.JobTypeId == 8) // DCB import and EDoc import
        {
            docs.BaseJobConfig = GetBootObject<ProfileBEO>(baseConfig.BootParameters);
            // for import reconversion, the collection id is the native document set collection id
            collectionId = ((ProfileBEO)docs.BaseJobConfig).DatasetDetails.CollectionId;
        }
        else if (baseConfig.JobTypeId == 35) // Law import
        {
            docs.BaseJobConfig = GetBootObject<LawImportBEO>(baseConfig.BootParameters);
            // for import reconversion, the collection id is the native document set collection id
            collectionId = ((LawImportBEO)docs.BaseJobConfig).CollectionId;
        }
        // dataset associated with the document set
        docs.DataSet = DataSetBO.GetDataSetDetailForCollectionId(collectionId);
        // assign heartbeat file path; if the directory does not exist, create it.
        // NOTE(review): plain string concatenation — assumes
        // CompressedFileExtractionLocation ends with a path separator and the
        // configured folder name provides its own separators; verify, or use Path.Combine.
        docs.HeartbeatFilePath = docs.DataSet.CompressedFileExtractionLocation +
                                 ApplicationConfigurationManager.GetValue("ReconversionHeartbeatFileFolder", "Imports") +
                                 PipelineId;
        if (!Directory.Exists(docs.HeartbeatFilePath))
        {
            Directory.CreateDirectory(docs.HeartbeatFilePath);
        }
        // matter id associated with the document set
        long matterId = docs.DataSet.Matter.FolderID;
        docs.Documents = ConversionReprocessStartupHelper.GetImportDocumentList(
            BootObject.FilePath,
            BootObject.JobSelectionMode,
            matterId,
            docs.DataSet.FolderID,
            BootObject.OrginialJobId,
            BootObject.Filters);
    }
    return (docs);
}
/// <summary>
/// Prints the given batch of documents: resolves the mapped printer and
/// dataset/matter context, prints each document behind a generated separator
/// sheet, forwards the successfully printed documents to the next worker and
/// audit-logs the batch.
/// </summary>
/// <param name="printDocuments">Documents to print in this batch.</param>
public void ProcessTheDocument(List<DocumentResult> printDocuments)
{
    // Nothing to do without boot parameters or a target collection.
    if (_mBootParameters == null)
    {
        return;
    }
    if (string.IsNullOrEmpty(_mBootParameters.DataSet.CollectionId))
    {
        return;
    }
    // Get mapped printer; the identifier is the last segment of UniqueIdentifier.
    _mMappedPrinterToNetwork = PrinterManagementBusiness.GetMappedPrinter(
        new MappedPrinterIdentifierBEO(
            _mBootParameters.Printer.UniqueIdentifier.Split(Constants.Split).Last(), true));
    // Create folder for intermediate artifacts.
    CreateFoldersForTemporaryStorage();
    // Get Dataset and Matter information for a given Collection Id
    _mDataSet = DataSetBO.GetDataSetDetailForCollectionId(_mBootParameters.DataSet.CollectionId);
    // Get DataSet Fields
    _mDataSet.DatasetFieldList.AddRange(
        DataSetBO.GetDataSetDetailForDataSetId(_mDataSet.FolderID).DatasetFieldList);
    // Get Matter information
    _mDataSet.Matter = MatterBO.GetMatterInformation(_mDataSet.FolderID);
    _mDatasetName = _mDataSet.FolderName;
    var documents = new List<DocumentResult>();
    var documentIdentifierEntities = new List<DocumentIdentifierEntity>();
    foreach (var document in printDocuments)
    {
        try
        {
            string errorCode;
            // Each document gets its own separator sheet in a fresh GUID folder.
            var separatorSheetFolder = Guid.NewGuid();
            var separatorSheet = Path.Combine(Path.Combine(_mSharedLocation, _mBootParameters.Name),
                Constants.SourceDirectoryPath, separatorSheetFolder.ToString(),
                Constants.separatorHtml);
            CreateseparatorSheet(separatorSheet, _mBootParameters.DataSet.MatterId,
                _mBootParameters.DataSet.CollectionId, document.DocumentID);
            // Print the document set.
            // NOTE(review): jobRunId/jobId are loop-invariant — this lookup could be
            // hoisted above the loop; it currently hits the DAO once per document.
            var jobRunId = (!String.IsNullOrEmpty(PipelineId)) ? Convert.ToInt32(PipelineId) : 0;
            var jobId = JobMgmtDAO.GetJobIdFromJobRunId(jobRunId);
            var status = PrintDocumentSet(jobId.ToString(CultureInfo.InvariantCulture), _mBootParameters,
                document, separatorSheet, out errorCode);
            if (status)
            {
                document.CreatedDate = DateTime.Now;
                documents.Add(document);
                // Log the success message using the Log worker.
                LogMessage(document, true, string.Empty);
            }
            else
            {
                // Log the failure message using the Log worker.
                LogMessage(document, false, errorCode);
            }
            // NOTE(review): identifier entities are collected regardless of print
            // status, and later passed to LogDocumentsPrinted — failed documents are
            // therefore audit-logged as printed. Confirm this is intended.
            if (_mDataSet != null && _mDataSet.Matter != null)
            {
                var documentIdentifierEntity = new DocumentIdentifierEntity();
                documentIdentifierEntity.CollectionId = document.CollectionID;
                documentIdentifierEntity.Dcn = document.DocumentControlNumber;
                documentIdentifierEntity.DocumentReferenceId = document.DocumentID;
                documentIdentifierEntity.CollectionName = _mDataSet.FolderName;
                documentIdentifierEntities.Add(documentIdentifierEntity);
            }
        }
        catch (Exception ex)
        {
            // report to director and continue with other documents if there is error in printing a document
            ex.Trace().Swallow();
            ReportToDirector(ex);
        }
    }
    if (documents.Count > 0)
    {
        Tracer.Info("Print Processing worker - Document Count: {0}",
            documents.Count.ToString(CultureInfo.InvariantCulture));
        // Forward the successfully printed documents to the next pipeline stage.
        Send(documents);
        if (_mDataSet != null && _mDataSet.Matter != null)
        {
            AuditLogFacade.LogDocumentsPrinted(_mDataSet.Matter.FolderID, documentIdentifierEntities);
        }
    }
}
/// <summary>
/// Pipeline startup for the load-file parser: deserializes the import
/// parameters, opens the load file with the configured encoding and
/// delimiters, resolves dataset/matter/search-server details and prepares
/// the optional image and text helper-file readers.
/// </summary>
protected override void BeginWork()
{
    try
    {
        base.BeginWork();

        m_Parameters = GetImportBEO(BootParameters);
        m_Parameters.ShouldNotBe(null);

        // Load-file location and tokenizing configuration.
        m_LoadFileUri = new Uri(m_Parameters.Locations.First());
        m_ColumnDelimiter = (char)m_Parameters.LoadFile.ColumnDelimiter;
        m_QuoteCharacter = (char)m_Parameters.LoadFile.QuoteCharacter;
        m_NewlineCharacter = (char)m_Parameters.LoadFile.NewlineDelimiter;
        m_RecordTokenizer = new RecordTokenizer(m_ColumnDelimiter, m_QuoteCharacter);
        m_EncodingType = Encoding.GetEncoding(m_Parameters.LoadFile.EncodingType);
        m_IsFirstLineHeader = m_Parameters.LoadFile.IsFirstLineHeader;

        // The URI may contain percent-encoded characters; decode before opening.
        var decodedLoadFilePath = HttpUtility.UrlDecode(m_LoadFileUri.OriginalString);
        ReportToDirector("LoadFileParser works on load file {0}", decodedLoadFilePath);
        m_StreamReader = new StreamReader(decodedLoadFilePath, m_EncodingType);

        #region Dataset Details
        m_Parameters.DatasetId.ShouldBeGreaterThan(0);
        m_Dataset = DataSetBO.GetDataSetDetailForDataSetId(m_Parameters.DatasetId);
        var matter = MatterDAO.GetMatterDetails(m_Parameters.MatterId.ToString());
        matter.ShouldNotBe(null);
        m_Dataset.Matter = matter;
        var searchServer = ServerDAO.GetSearchServer(matter.SearchServer.Id);
        searchServer.ShouldNotBe(null);
        m_Dataset.Matter.SearchServer = searchServer;
        m_DatasetPath = m_Dataset.CompressedFileExtractionLocation;
        #endregion

        // Optional image (OPT) helper file for image extraction.
        if (m_Parameters != null && m_Parameters.IsImportImages &&
            m_Parameters.LoadFile.ImageFile != null &&
            m_Parameters.LoadFile.ImageFile.ImageExtractionOption == LoadFileImageExtractionOption.HelperFile)
        {
            var imageHelperPath = m_Parameters.LoadFile.ImageFile.HelperFileName;
            ReportToDirector("LoadFileParser uses image helper file {0}", imageHelperPath);
            _imageHelperFileParser = new HelperFileParser(this, imageHelperPath);
        }

        // Optional content (text) helper file for text extraction.
        if (m_Parameters != null && m_Parameters.LoadFile.ContentFile != null &&
            m_Parameters.LoadFile.ContentFile.TextExtractionOption == LoadFileTextExtractionOption.HelperFile)
        {
            var textHelperPath = m_Parameters.LoadFile.ContentFile.HelperFileName;
            ReportToDirector("LoadFileParser uses content (text) helper file {0}", textHelperPath);
            TextHelperFile = new HelperFile(this, textHelperPath);
        }

        // Column number of the content field, when one is configured.
        if (m_Parameters != null && m_Parameters.LoadFile != null &&
            m_Parameters.LoadFile.ContentFile != null &&
            m_Parameters.LoadFile.ContentFile.LoadFileContentField != null)
        {
            m_ContentFieldNumber = Convert.ToInt32(m_Parameters.LoadFile.ContentFile.LoadFileContentField);
        }

        _uniqueThreadString = Guid.NewGuid().ToString().Replace("-", "").ToUpper();
        SetMessageBatchSize(m_Parameters);
    }
    catch (Exception ex)
    {
        // Send log to the Log pipe, trace and report, then let the pipeline fail.
        LogMessage(false, Constants.ParserFailureMessageOnInitialize);
        ex.Trace();
        ReportToDirector("Exception in LoadFileParser.BeginWork", ex.ToDebugString());
        throw;
    }
}