/// <summary>
/// Sends the specified document batch to the next worker in the pipeline.
/// </summary>
/// <param name="docBatch">The document batch to send.</param>
/// <param name="docCollection">The original document collection supplying the job-level settings.</param>
private void SendImportReconversionBatch(List<ReconversionDocumentBEO> docBatch, ConversionDocCollection docCollection)
{
    // Clone the job-level settings into a fresh collection that carries only this batch.
    var batchCollection = new ConversionDocCollection
    {
        BaseJobConfig = docCollection.BaseJobConfig,
        BaseJobTypeId = docCollection.BaseJobTypeId,
        DataSet = docCollection.DataSet,
        Documents = docBatch, // this message carries just the current batch
        HeartbeatFilePath = docCollection.HeartbeatFilePath,
        JobConfig = docCollection.JobConfig
    };

    var message = new PipeMessageEnvelope { Body = batchCollection };

    // Record the reprocessing state in the processSet table before dispatching.
    BulkUpdateImportReprocessingState(batchCollection);

    // Deliver the batch to the configured output pipe, if one is available.
    var outputPipe = GetOutputDataPipe(Constants.OutputPipeNameToConversionReprocessImport);
    if (outputPipe != null)
    {
        outputPipe.Send(message);
    }
}
/// <summary>
/// Fetches the reconversion document collection and pushes it to the next worker in the pipeline.
/// </summary>
/// <returns>Always <c>true</c>; failures are logged and swallowed so the pipeline keeps running.</returns>
protected override bool GenerateMessage()
{
    try
    {
        if (string.IsNullOrEmpty(PipelineId))
        {
            // Specific exception type instead of the base Exception (caught below, so
            // no caller-visible change).
            throw new InvalidOperationException("PipelineId is null or empty");
        }

        ConversionDocCollection documentCollection = GetReconversionDocCollection();

        // Nothing to process - return without dispatching an empty message.
        if (documentCollection == null || documentCollection.Documents == null ||
            !documentCollection.Documents.Any())
        {
            return true;
        }

        SendMessage(documentCollection);
        IncreaseProcessedDocumentsCount(documentCollection.Documents.Count());
    }
    catch (Exception ex)
    {
        // Deliberate best-effort: log and continue rather than failing the whole job.
        LogMessage(false, "Reconversion Startup: failed to get list of document and send to next processing: " + ex.Message);
        ex.Trace();
    }
    return true;
}
/// <summary>
/// Bulk-updates the reprocessing state for a batch of production documents.
/// </summary>
/// <param name="conversionDocCollection">The conversion doc collection (supplies the matter id).</param>
/// <param name="productionDocumentDetails">The production document details; ignored when null.</param>
private void BulkUpdateProductionReprocessingState(ConversionDocCollection conversionDocCollection,
    IEnumerable<ProductionDocumentDetail> productionDocumentDetails)
{
    if (productionDocumentDetails == null)
    {
        return;
    }

    // Translate each production detail into its conversion-log representation
    // and persist the whole batch in one call.
    var logEntries = productionDocumentDetails
        .Select(detail => ConvertProductionDocumentDetails(detail))
        .ToList();

    long matterFolderId = conversionDocCollection.DataSet.Matter.FolderID;
    BulkUpdateProcessedDocuments(matterFolderId, logEntries);
}
/// <summary>
/// Bulk-updates the reprocessing state for the import documents in the collection.
/// </summary>
/// <param name="conversionDocCollection">The conversion doc collection; a null document list is ignored.</param>
private void BulkUpdateImportReprocessingState(ConversionDocCollection conversionDocCollection)
{
    var documents = conversionDocCollection.Documents;
    if (documents == null)
    {
        return;
    }

    // Translate each reconversion document into its conversion-log representation
    // and persist the whole batch in one call.
    var logEntries = documents
        .Select(doc => ConvertReocnversionDocumentBeo(doc))
        .ToList();

    long matterFolderId = conversionDocCollection.DataSet.Matter.FolderID;
    BulkUpdateProcessedDocuments(matterFolderId, logEntries);
}
/// <summary>
/// Converts the documents in the collection to a list of production document details,
/// assigning Bates running numbers and correlation ids and ensuring extraction folders exist.
/// </summary>
/// <param name="docCollection">The document collection to convert; may be null.</param>
/// <returns>The converted list, or null when there are no documents to convert.</returns>
public static List<ProductionDocumentDetail> ConvertToProductionDocumentList(
    ConversionDocCollection docCollection)
{
    // Guard BEFORE touching any member: the original read docCollection.BaseJobConfig
    // ahead of the null check, risking a NullReferenceException on null input.
    if (docCollection == null || docCollection.Documents == null || !docCollection.Documents.Any())
    {
        return null; // preserves the original contract of returning null when there is nothing to convert
    }

    var baseConfig = docCollection.BaseJobConfig as ProductionDetailsBEO;
    var modelDoc = ProductionStartupHelper.ConstructProductionModelDocument(baseConfig);
    var docList = new List<ProductionDocumentDetail>();
    int docNumber = 0;

    foreach (var doc in docCollection.Documents)
    {
        ProductionDocumentDetail newDoc =
            ConvertToProductionDocumentDetail((ReconversionProductionDocumentBEO)doc, modelDoc);

        // Derive the Bates running number for the selected reprocess document by
        // stripping the production prefix from the starting Bates number.
        int batesRunningNumber = 0;
        if (!string.IsNullOrEmpty(newDoc.StartingBatesNumber))
        {
            batesRunningNumber =
                Convert.ToInt32(newDoc.StartingBatesNumber.Replace(newDoc.Profile.ProductionPrefix, ""));
        }
        newDoc.StartBatesRunningNumber = (batesRunningNumber > 0)
            ? (batesRunningNumber - Convert.ToInt32(newDoc.Profile.ProductionStartingNumber))
            : batesRunningNumber;

        // The sequence number doubles as the correlation id - used as part of the key
        // when inserting into the EV_JOB_ProductionTaskFlags table.
        newDoc.CorrelationId = ++docNumber;

        // Create the extraction folder if it does not already exist.
        if (!string.IsNullOrEmpty(newDoc.ExtractionLocation) && !Directory.Exists(newDoc.ExtractionLocation))
        {
            Directory.CreateDirectory(newDoc.ExtractionLocation);
        }

        docList.Add(newDoc);
    }
    return docList;
}
/// <summary>
/// Batches the documents in the collection and sends each batch to the next worker in the pipeline.
/// </summary>
/// <param name="docCollection">The document collection to batch and dispatch.</param>
private void SendMessage(ConversionDocCollection docCollection)
{
    // Base job type ids observed here: 2 = DCB import, 8 = EDoc import,
    // 14 = load file import, 35 = Law import, 9 = production.
    if (docCollection.BaseJobTypeId == 2 || docCollection.BaseJobTypeId == 8 ||
        docCollection.BaseJobTypeId == 14 || docCollection.BaseJobTypeId == 35) //import
    {
        var docBatch = new List<ReconversionDocumentBEO>();
        foreach (var doc in docCollection.Documents)
        {
            docBatch.Add(doc);
            if (docBatch.Count == BatchSize)
            {
                SendImportReconversionBatch(docBatch, docCollection);
                docBatch = new List<ReconversionDocumentBEO>();
            }
        }

        // Send the trailing partial batch, if any. Checking Count > 0 (instead of the
        // original Count != BatchSize) avoids dispatching an EMPTY batch when the
        // document count is an exact multiple of BatchSize.
        if (docBatch.Count > 0)
        {
            SendImportReconversionBatch(docBatch, docCollection);
        }
    }
    else if (docCollection.BaseJobTypeId == 9) //production reconversion, jump to production preprocessing
    {
        var docList = ConversionReprocessStartupHelper.ConvertToProductionDocumentList(docCollection);
        if (docList != null && docList.Count > 0)
        {
            var pBatch = new List<ProductionDocumentDetail>();
            int count = 0;
            foreach (var doc in docList)
            {
                count++;
                pBatch.Add(doc);
                // Flush on every full batch and on the final (possibly partial) batch.
                if (count % BatchSize == 0 || count == docList.Count)
                {
                    // Persist reprocessing state, then hand the batch to production preprocessing.
                    BulkUpdateProductionReprocessingState(docCollection, pBatch);
                    SendProductionReconversionBatch(pBatch);
                    pBatch = new List<ProductionDocumentDetail>();
                }
            }
        }
    }
}
/// <summary>
/// Builds the document collection to be reconverted, resolving the base job's
/// configuration, dataset, and document list according to the base job type.
/// </summary>
/// <returns>The populated <see cref="ConversionDocCollection"/>. Documents may be left
/// null for a production base job when no documents match.</returns>
public ConversionDocCollection GetReconversionDocCollection( )
{
    var docs = new ConversionDocCollection();
    //collection id to be used in reconversion; which id applies depends on the base job type
    string collectionId = "";
    //populate job info from the boot parameters of this startup worker
    docs.JobConfig = BootObject;
    BaseJobBEO baseConfig = ReconversionDAO.GetJobConfigInfo(Convert.ToInt32(BootObject.OrginialJobId));
    docs.BaseJobTypeId = baseConfig.JobTypeId;
    //different types of base job use different objects to hold job config info
    if (baseConfig.JobTypeId == 9) // Base job is a production job
    {
        docs.BaseJobConfig = GetBootObject <ProductionDetailsBEO>(baseConfig.BootParameters);
        //for production reconversion, use the production set's OriginalCollectionId,
        //which is the native set's collection id
        collectionId = ((ProductionDetailsBEO)docs.BaseJobConfig).OriginalCollectionId;
        //dataset associated with the document set
        docs.DataSet = DataSetBO.GetDataSetDetailForCollectionId(collectionId);
        //matter id associated with the document set
        long matterId = docs.DataSet.Matter.FolderID;
        //get the list of production documents to be reprocessed
        var helper = new ConversionReprocessStartupHelper();
        IEnumerable <ReconversionProductionDocumentBEO> pDocs = helper.GetProductionDocumentList(
            BootObject.FilePath, BootObject.JobSelectionMode, matterId,
            docs.BaseJobConfig as ProductionDetailsBEO, docs.DataSet.RedactableDocumentSetId,
            Convert.ToInt32(BootObject.OrginialJobId), BootObject.Filters);
        //cast back to a list of the parent class; Documents stays null when nothing matched
        if (pDocs != null && pDocs.Any())
        {
            docs.Documents = pDocs.Cast <ReconversionDocumentBEO>().ToList();
        }
    }
    else
    {
        if (baseConfig.JobTypeId == 14) //load file import
        {
            docs.BaseJobConfig = GetBootObject <ImportBEO>(baseConfig.BootParameters);
            //for import reconversion, the collection id is the native document set's collection id
            collectionId = ((ImportBEO)docs.BaseJobConfig).CollectionId;
        }
        else if (baseConfig.JobTypeId == 2 || baseConfig.JobTypeId == 8) //DCB import and EDoc import
        {
            docs.BaseJobConfig = GetBootObject <ProfileBEO>(baseConfig.BootParameters);
            //for import reconversion, the collection id is the native document set's collection id
            collectionId = ((ProfileBEO)docs.BaseJobConfig).DatasetDetails.CollectionId;
        }
        else if (baseConfig.JobTypeId == 35) //Law import
        {
            docs.BaseJobConfig = GetBootObject <LawImportBEO>(baseConfig.BootParameters);
            //for import reconversion, the collection id is the native document set's collection id
            collectionId = ((LawImportBEO)docs.BaseJobConfig).CollectionId;
        }
        //dataset associated with the document set
        docs.DataSet = DataSetBO.GetDataSetDetailForCollectionId(collectionId);
        //assign the heartbeat file path; create the directory if it does not exist
        //NOTE(review): built by raw string concatenation - presumably
        //CompressedFileExtractionLocation ends with a path separator; confirm
        docs.HeartbeatFilePath = docs.DataSet.CompressedFileExtractionLocation +
            ApplicationConfigurationManager.GetValue("ReconversionHeartbeatFileFolder", "Imports") +
            PipelineId;
        if (!Directory.Exists(docs.HeartbeatFilePath))
        {
            Directory.CreateDirectory(docs.HeartbeatFilePath);
        }
        //matter id associated with the document set
        long matterId = docs.DataSet.Matter.FolderID;
        docs.Documents = ConversionReprocessStartupHelper.GetImportDocumentList(
            BootObject.FilePath, BootObject.JobSelectionMode, matterId,
            docs.DataSet.FolderID, BootObject.OrginialJobId, BootObject.Filters);
    }
    return(docs);
}
/// <summary>
/// Reconverts documents from import jobs: pushes each document to the converter with a
/// heartbeat file, logs the per-document outcome, and bulk-updates the processed state.
/// On return, <paramref name="docCollection"/>.Documents holds only the documents that
/// were actually submitted for conversion (the validation list).
/// </summary>
/// <param name="docCollection">The documents to reconvert.</param>
public void ImportReconvert(ConversionDocCollection docCollection)
{
    // loop through documents to push each document for conversion
    var documentConversionLogBeo = new List <DocumentConversionLogBeo>();
    var validationList = new List <ReconversionDocumentBEO>();
    foreach (var document in docCollection.Documents)
    {
        short reasonId;
        byte status;
        int fileSize = 0;
        try
        {
            //if FileList does not exist, do not send for conversion
            if (document.FileList == null)
            {
                //cannot find file - log only; GetReasonCodes below still records the reason
                LogMessage(false, "File paths null or Files does not exist");
            }
            else
            {
                //calculate total file size in KB; multiple image files are added together
                fileSize = 0;
                foreach (var file in document.FileList)
                {
                    if (String.IsNullOrEmpty(file))
                    {
                        continue;
                    }
                    var fileInfo = new FileInfo(file);
                    if (fileInfo.Exists)
                    {
                        fileSize += (int)Math.Ceiling(fileInfo.Length / Constants.KBConversionConstant);
                    }
                }
            }
            //Item1 = files to convert, Item2 = reason id, Item3 = status
            var filesWithReasonCodes = Utils.GetReasonCodes(document.FileList);
            var heartbeatFilePath = docCollection.GetDefaultHeartbeatFileFullPath(document);
            var matterId = docCollection.DataSet.Matter.FolderID;
            ConverterAdapter.PushDocumentForConversionWithHearbeat(
                matterId.ToString(CultureInfo.InvariantCulture),
                document.CollectionId,
                document.DocumentId,
                filesWithReasonCodes.Item1,
                heartbeatFilePath,
                CmgServiceConfigBO.GetServiceConfigurationsforConfig(Constants.PublishBlankPages));
            reasonId = filesWithReasonCodes.Item2;
            status = filesWithReasonCodes.Item3;
            //reset conversion tracking fields now that the document has been (re)submitted
            document.ConversionCheckCounter = 0;
            document.SubmittedTime = DateTime.UtcNow;
            document.ConversionEnqueueTime = DateTime.UtcNow;
            //only add submitted documents for validation, otherwise we might wait for
            //heartbeat files that never exist
            validationList.Add(document);
            IncreaseProcessedDocumentsCount(1);
        }
        catch (Exception ex)
        {
            ReportToDirector(ex);
            ex.Trace().Swallow();
            //map the failure to a reason code; a WebException means the file never reached the converter
            if (ex is WebException)
            {
                reasonId = EVRedactItErrorCodes.FailedToSendFile;
            }
            else
            {
                reasonId = Utils.GetConversionErrorReason(ex.GetErrorCode());
            }
            status = EVRedactItErrorCodes.Failed;
            LogMessage(false, ex.ToUserString());
        }
        //record the outcome (success or failure) for this document in the bulk log
        var logBeo = ConvertToReoconversionDocumentBeo(document, status, reasonId);
        logBeo.Size = fileSize;
        documentConversionLogBeo.Add(logBeo);
    }
    //replace the document list with only the successfully submitted documents
    docCollection.Documents = validationList;
    BulkUpdateProcessedDocuments(docCollection.DataSet.Matter.FolderID, documentConversionLogBeo);
}