/// <summary>
/// Starts up the tag worker: validates boot parameters, reads the bulk
/// tagging window size from configuration and delegates to DoBeginWork.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    #region Assert conditions
    BootParameters.ShouldNotBe(null);
    BootParameters.ShouldBeTypeOf <string>();
    PipelineId.ShouldNotBeEmpty();
    #endregion

    try
    {
        // Fall back to the default window size when the configured value
        // is missing or not a valid integer.
        var configuredWindowSize = ApplicationConfigurationManager.GetValue(MBulkTaggingWindowSize);
        if (!Int32.TryParse(configuredWindowSize, out _mWindowSize))
        {
            _mWindowSize = MDefaultWindowSize;
        }

        DoBeginWork(BootParameters);
    }
    catch (Exception ex)
    {
        // Failures are logged and reported to the director; the pipeline is not torn down here.
        LogMessage(true, string.Format("Error in TagStartupWorker - Exception: {0}", ex.ToUserString()));
        ReportToDirector(ex);
        ex.Trace().Swallow();
    }
}
/// <summary>
/// Starts up the split-reviewset worker: validates boot parameters and
/// delegates to DoBeginWork; failures are logged and rethrown.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    #region Assert conditions
    BootParameters.ShouldNotBe(null);
    BootParameters.ShouldBeTypeOf <string>();
    PipelineId.ShouldNotBeEmpty();
    #endregion

    try
    {
        DoBeginWork(BootParameters);
    }
    catch (ApplicationException apEx)
    {
        // Application-level failures carry a user-meaningful message already.
        LogMessage(false, apEx.Message);
        throw;
    }
    catch (Exception ex)
    {
        LogMessage(false, string.Format("Split Reviewset {0} creation failed. Exception: {1}", _reviewSetRecord.ReviewSetName, ex.Message));
        throw;
    }
}
public async Task Create_ShallReturnAJob()
{
    // Arrange: mocked client returning a fixed JobInfo for any CreateJob call.
    var jobsClientMock = new Mock <IJobsClient>();
    var loggerMock = new Mock <ILogger <ClaraJobsApi> >();

    JobId.TryParse("jobid", out JobId jobId);
    PayloadId.TryParse("payloadid", out PayloadId payloadId);
    PipelineId.TryParse("pipelineid", out PipelineId pipelineId);

    jobsClientMock
        .Setup(p => p.CreateJob(It.IsAny <PipelineId>(), It.IsAny <string>(), It.IsAny <JobPriority>()))
        .ReturnsAsync(new JobInfo
        {
            Name = "bla bla job",
            JobId = jobId,
            PayloadId = payloadId,
            PipelineId = pipelineId
        });

    var sut = new ClaraJobsApi(jobsClientMock.Object, loggerMock.Object);

    // Act
    var job = await sut.Create(Guid.NewGuid().ToString(), "bla bla", JobPriority.Higher);

    // Assert: returned ids match the mocked response.
    Assert.Equal(jobId.ToString(), job.JobId);
    Assert.Equal(payloadId.ToString(), job.PayloadId);

    jobsClientMock.Verify(
        p => p.CreateJob(It.IsAny <PipelineId>(), It.IsAny <string>(), JobPriority.Higher),
        Times.Exactly(1));
    loggerMock.VerifyLogging(LogLevel.Information, Times.Once());
    loggerMock.VerifyLogging(LogLevel.Error, Times.Never());
}
/// <summary>
/// Processes one pipe message: extracts the export document collection and
/// accumulates volume information for the contained documents.
/// </summary>
/// <param name="message">Envelope whose Body is an ExportDocumentCollection.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    try
    {
        _exportDocumentCollection = (ExportDocumentCollection)message.Body;

        // FIX: the null guard must run BEFORE the assertions below - the original
        // evaluated _exportDocumentCollection.Documents.LongCount() first, so a null
        // collection threw NullReferenceException and the guard was never reached.
        if (_exportDocumentCollection == null || _exportDocumentCollection.Documents == null)
        {
            Tracer.Error("ExportOption Volume Worker: Document detail is not set in pipe message for job run id: {0}", PipelineId);
            return;
        }

        #region Assertion
        //Pre Condition
        PipelineId.ShouldNotBeEmpty();
        BootParameters.ShouldNotBe(null);
        BootParameters.ShouldBeTypeOf <string>();
        _exportDocumentCollection.Documents.LongCount().ShouldBeGreaterThan(0);
        #endregion

        // Lazily initialize on the first message.
        if (_volume == null)
        {
            InitializeForProcessing(BootParameters);
        }

        CalculateVolume(_exportDocumentCollection.Documents);
    }
    catch (Exception ex)
    {
        // Report and swallow so one bad message does not kill the worker.
        ex.Trace().Swallow();
        ReportToDirector(ex);
    }
}
/// <summary>
/// Adds (or refreshes) a format on this document descriptor, raising the
/// appropriate domain event depending on whether the format already exists.
/// </summary>
/// <param name="documentFormat">Format being attached.</param>
/// <param name="blobId">Blob holding the format's content.</param>
/// <param name="createdBy">Pipeline that produced the format.</param>
public void AddFormat(DocumentFormat documentFormat, BlobId blobId, PipelineId createdBy)
{
    ThrowIfDeleted(String.Format("Add format {0} and blob {1} - CreatedBy {2}", documentFormat, blobId, createdBy));

    var formatAlreadyPresent = InternalState.HasFormat(documentFormat);
    if (formatAlreadyPresent)
    {
        RaiseEvent(new DocumentFormatHasBeenUpdated(documentFormat, blobId, createdBy));
    }
    else
    {
        RaiseEvent(new FormatAddedToDocumentDescriptor(documentFormat, blobId, createdBy));
    }
}
/// <summary>
/// Gets the hash code, folding each non-null member into the accumulator
/// with the conventional prime-multiply scheme (41 seed, 59 multiplier).
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
    unchecked // overflow simply wraps, which is fine for hashing
    {
        var hash = 41;
        if (Id != null)
        {
            hash = hash * 59 + Id.GetHashCode();
        }
        if (ProgramId != null)
        {
            hash = hash * 59 + ProgramId.GetHashCode();
        }
        if (PipelineId != null)
        {
            hash = hash * 59 + PipelineId.GetHashCode();
        }
        if (ArtifactsVersion != null)
        {
            hash = hash * 59 + ArtifactsVersion.GetHashCode();
        }
        if (User != null)
        {
            hash = hash * 59 + User.GetHashCode();
        }
        // Value-typed members need no null checks.
        hash = hash * 59 + Status.GetHashCode();
        hash = hash * 59 + Trigger.GetHashCode();
        if (CreatedAt != null)
        {
            hash = hash * 59 + CreatedAt.GetHashCode();
        }
        if (UpdatedAt != null)
        {
            hash = hash * 59 + UpdatedAt.GetHashCode();
        }
        if (FinishedAt != null)
        {
            hash = hash * 59 + FinishedAt.GetHashCode();
        }
        if (Embedded != null)
        {
            hash = hash * 59 + Embedded.GetHashCode();
        }
        if (Links != null)
        {
            hash = hash * 59 + Links.GetHashCode();
        }
        return hash;
    }
}
/// <summary>
/// Command: attach <paramref name="documentFormat"/> (stored in
/// <paramref name="blobId"/>) to the descriptor identified by
/// <paramref name="aggregateId"/>.
/// </summary>
/// <exception cref="ArgumentNullException">When aggregateId is null.</exception>
public AddFormatToDocumentDescriptor(
    DocumentDescriptorId aggregateId,
    DocumentFormat documentFormat,
    BlobId blobId,
    PipelineId createdById) : base(aggregateId)
{
    // NOTE(review): the base constructor has already received aggregateId by the
    // time this check runs; the guard only protects members assigned below.
    if (aggregateId == null)
    {
        // FIX: use nameof instead of a magic string so renames stay consistent.
        throw new ArgumentNullException(nameof(aggregateId));
    }
    DocumentFormat = documentFormat;
    BlobId = blobId;
    CreatedBy = createdById;
}
/// <summary>
/// Uploads a file as a new format and sends the AddFormatToDocumentDescriptor
/// command for the given descriptor id; returns the uploaded blob id.
/// </summary>
/// <param name="id">Numeric document descriptor id.</param>
/// <param name="handle">Document handle (kept for signature compatibility; not used here).</param>
/// <param name="format">Format being attached.</param>
/// <param name="pipelineId">Pipeline credited with creating the format.</param>
/// <param name="pathToFile">Local path of the file to upload.</param>
BlobId AddFormatToDocument(int id, string handle, DocumentFormat format, PipelineId pipelineId, string pathToFile)
{
    // FIX: removed dead locals - the original built a DocumentHandleInfo (and the
    // file name feeding it) that was never used by the rest of the method.
    var blobId = _filestore.Upload(format, pathToFile);
    _bus.Send(new AddFormatToDocumentDescriptor(
                  new DocumentDescriptorId(id),
                  format,
                  blobId,
                  pipelineId
                  ));
    // Brief pause to let the asynchronous bus process the command - presumably a
    // test-helper convenience; TODO confirm whether callers rely on this delay.
    Thread.Sleep(50);
    return blobId;
}
/// <summary>
/// Gets the hash code, consistent with Equals: because Equals compares Tools
/// element-wise via SequenceEqual, the hash must also be computed from the
/// elements rather than the collection reference.
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
    unchecked // Overflow is fine, just wrap
    {
        var hashCode = 41;
        if (PipelineId != null)
        {
            hashCode = hashCode * 59 + PipelineId.GetHashCode();
        }
        if (Tools != null)
        {
            // FIX: the original used Tools.GetHashCode() (reference identity),
            // so two instances equal under Equals could hash differently.
            foreach (var tool in Tools)
            {
                hashCode = hashCode * 59 + (tool == null ? 0 : tool.GetHashCode());
            }
        }
        return hashCode;
    }
}
/// <summary>
/// Processes one pipe message: copies the files for the documents in the
/// export collection, writes the audit log, then forwards the message and
/// any per-file copy logs downstream.
/// </summary>
/// <param name="message">Envelope whose Body is an ExportDocumentCollection.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    try
    {
        _exportDocumentCollection = (ExportDocumentCollection)message.Body;

        #region Assertion
        //Pre Condition
        PipelineId.ShouldNotBeEmpty();
        BootParameters.ShouldNotBe(null);
        BootParameters.ShouldBeTypeOf <string>();
        _exportDocumentCollection.ShouldNotBe(null);
        _exportDocumentCollection.Documents.ShouldNotBe(null);
        _exportDocumentCollection.Documents.LongCount().ShouldBeGreaterThan(0);
        #endregion

        if (_exportDocumentCollection == null)
        {
            Tracer.Error("ExportOption File Copy Worker: Pipe message body contains empty data for job run id:{0}", PipelineId);
            return;
        }

        _exportDocumentCollection.Dataset = _dataset;
        InitializeForProcessing(BootParameters);

        List <JobWorkerLog <ExportFileCopyLogInfo> > copyLogs;
        PerformCopy(out copyLogs);

        //Audit Log
        InsertAuditLog(_exportDocumentCollection, _exportLoadJobDetailBeo.JobName);

        #region Send Message
        Send();
        // Forward per-file copy logs only when something was produced.
        if (copyLogs != null && copyLogs.Count > 0)
        {
            SendLog(copyLogs);
        }
        #endregion
    }
    catch (Exception ex)
    {
        // Report and swallow so one bad message does not kill the worker.
        ex.Trace().Swallow();
        ReportToDirector(ex);
    }
}
/// <summary>
/// Starts up the print worker: validates boot parameters, caches the
/// collection id and requesting user from them, then mocks the session.
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    #region Assertion
    //Pre Condition
    BootParameters.ShouldNotBe(null);
    BootParameters.ShouldBeTypeOf <string>();
    PipelineId.ShouldNotBeEmpty();
    #endregion

    // Pull the print request details out of the boot parameters.
    _mBootParameters = GetPrintDetailsBusinessEntity(BootParameters);
    _mCollectionId = _mBootParameters.DataSet.CollectionId;
    _mCreatedBy = _mBootParameters.RequestedBy.UserId;

    // Mock session
    MockSession();
}
/// <summary>
/// Processes one pipe message: validates the export document collection,
/// attaches the dataset and retrieves all export data for its documents.
/// </summary>
/// <param name="message">Envelope whose Body is an ExportDocumentCollection.</param>
protected override void ProcessMessage(PipeMessageEnvelope message)
{
    try
    {
        var documentCollection = (ExportDocumentCollection)message.Body;

        #region Assertion
        //Pre Condition
        PipelineId.ShouldNotBeEmpty();
        documentCollection.ShouldNotBe(null);
        documentCollection.Documents.ShouldNotBe(null);
        documentCollection.Documents.LongCount().ShouldBeGreaterThan(0);
        #endregion

        documentCollection.Dataset = _dataset;
        GetAllData(documentCollection);
    }
    catch (Exception ex)
    {
        // Report and swallow so one bad message does not kill the worker.
        ex.Trace().Swallow();
        ReportToDirector(ex);
    }
}
/// <summary>
/// Initializes job state from the Quartz job data map, runs the job body,
/// best-effort deletes the per-job working folder and starts the polling timer.
/// </summary>
/// <param name="pollingTimeInMs">Polling interval in milliseconds.</param>
public void Start(Int32 pollingTimeInMs)
{
    var dataMap = context.JobDetail.JobDataMap;

    PipelineId = new PipelineId(dataMap.GetString(JobKeys.PipelineId));
    InputDocumentId = new DocumentId(dataMap.GetString(JobKeys.DocumentId));
    InputBlobId = new BlobId(dataMap.GetString(JobKeys.BlobId));
    InputDocumentFormat = new DocumentFormat(dataMap.GetString(JobKeys.Format));

    if (TenantId == null)
    {
        throw new Exception("tenant not set!");
    }

    // Scratch folder is namespaced by the concrete job type.
    _workingFolder = Path.Combine(
        ConfigService.GetWorkingFolder(TenantId, InputBlobId),
        GetType().Name
        );

    OnExecute(context);

    // Cleanup is best-effort: a failure to delete is logged, never fatal.
    try
    {
        if (Directory.Exists(_workingFolder))
        {
            Directory.Delete(_workingFolder, true);
        }
    }
    catch (Exception ex)
    {
        Logger.ErrorFormat(ex, "Error deleting {0}", _workingFolder);
    }

    pollingTimer = new Timer(pollingTimeInMs);
    pollingTimer.Elapsed += pollingTimer_Elapsed;
    pollingTimer.Start();
}
/// <summary>
/// Creates a Clara job on the given pipeline, retrying up to three times
/// with exponential back-off (2, 4, 8 seconds) when the client call fails.
/// </summary>
/// <param name="pipeline">Pipeline id as a string; must parse to a PipelineId.</param>
/// <param name="jobName">Name for the new job.</param>
/// <param name="jobPriority">Priority of the new job.</param>
/// <returns>The created job converted from the client response.</returns>
public async Task <Job> Create(string pipeline, string jobName, JobPriority jobPriority)
{
    var retryPolicy = Policy
        .Handle <Exception>()
        .WaitAndRetryAsync(
            3,
            attempt => TimeSpan.FromSeconds(Math.Pow(2, attempt)),
            (exception, retryCount, context) =>
            {
                _logger.Log(LogLevel.Error, "Exception while creating a new job: {exception}", exception);
            });

    return await retryPolicy.ExecuteAsync(async () =>
    {
        if (!PipelineId.TryParse(pipeline, out PipelineId pipelineId))
        {
            throw new ConfigurationException($"Invalid Pipeline ID configured: {pipeline}");
        }

        var response = await _jobsClient.CreateJob(pipelineId, jobName, jobPriority);
        var job = ConvertResponseToJob(response);
        _logger.Log(LogLevel.Information, "Clara Job.Create API called successfully, Pipeline={0}, JobId={1}, JobName={2}", pipeline, job.JobId, jobName);
        return job;
    }).ConfigureAwait(false);
}
/// <summary>
/// Returns true if RemoveToolsRequest instances are equal: PipelineId by
/// Equals, Tools element-wise via SequenceEqual.
/// </summary>
/// <param name="other">Instance of RemoveToolsRequest to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(RemoveToolsRequest other)
{
    if (other is null)
    {
        return false;
    }
    if (ReferenceEquals(this, other))
    {
        return true;
    }

    var pipelineIdsMatch =
        PipelineId == other.PipelineId ||
        (PipelineId != null && PipelineId.Equals(other.PipelineId));

    var toolsMatch =
        Tools == other.Tools ||
        (Tools != null && other.Tools != null && Tools.SequenceEqual(other.Tools));

    return pipelineIdsMatch && toolsMatch;
}
/// <summary>
/// Event: an existing document format was refreshed with new blob content.
/// </summary>
/// <param name="documentFormat">Format that was updated.</param>
/// <param name="blobId">Blob holding the new content.</param>
/// <param name="createdBy">Pipeline that produced the update.</param>
public DocumentFormatHasBeenUpdated(DocumentFormat documentFormat, BlobId blobId, PipelineId createdBy)
{
    DocumentFormat = documentFormat;
    BlobId = blobId;
    CreatedBy = createdBy;
}
/// <summary>
/// Pairs a blob with the pipeline that produced it.
/// </summary>
/// <param name="blobId">Blob holding the format content.</param>
/// <param name="pipelineId">Producing pipeline.</param>
public FormatInfo(BlobId blobId, PipelineId pipelineId)
{
    BlobId = blobId;
    PipelineId = pipelineId;
}
/// <summary>
/// Records (adds or overwrites) the blob/pipeline pair for the given format.
/// </summary>
/// <param name="pipelineId">Pipeline that produced the format.</param>
/// <param name="format">Format key.</param>
/// <param name="blobId">Blob holding the format content.</param>
public void AddFormat(PipelineId pipelineId, DocumentFormat format, BlobId blobId)
{
    // Indexer semantics: a pre-existing entry for this format is replaced.
    Formats[format] = new FormatInfo(blobId, pipelineId);
}
/// <summary>
/// Event: a brand-new format was attached to a document descriptor.
/// </summary>
/// <param name="documentFormat">Format that was added.</param>
/// <param name="blobId">Blob holding the format content.</param>
/// <param name="createdBy">Pipeline that produced the format.</param>
public FormatAddedToDocumentDescriptor(DocumentFormat documentFormat, BlobId blobId, PipelineId createdBy)
{
    DocumentFormat = documentFormat;
    BlobId = blobId;
    CreatedBy = createdBy;
}
/// <summary>
/// Returns true if PipelineExecution instances are equal: every member is
/// compared with ==/Equals, null-safely for reference-typed members.
/// </summary>
/// <param name="other">Instance of PipelineExecution to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(PipelineExecution other)
{
    if (other is null)
    {
        return false;
    }
    if (ReferenceEquals(this, other))
    {
        return true;
    }

    // Each clause: reference/operator equality short-circuit, then null-safe Equals.
    var idsMatch =
        (Id == other.Id || Id != null && Id.Equals(other.Id)) &&
        (ProgramId == other.ProgramId || ProgramId != null && ProgramId.Equals(other.ProgramId)) &&
        (PipelineId == other.PipelineId || PipelineId != null && PipelineId.Equals(other.PipelineId)) &&
        (ArtifactsVersion == other.ArtifactsVersion || ArtifactsVersion != null && ArtifactsVersion.Equals(other.ArtifactsVersion)) &&
        (User == other.User || User != null && User.Equals(other.User));

    var stateMatches =
        (Status == other.Status || Status.Equals(other.Status)) &&
        (Trigger == other.Trigger || Trigger.Equals(other.Trigger));

    var timestampsMatch =
        (CreatedAt == other.CreatedAt || CreatedAt != null && CreatedAt.Equals(other.CreatedAt)) &&
        (UpdatedAt == other.UpdatedAt || UpdatedAt != null && UpdatedAt.Equals(other.UpdatedAt)) &&
        (FinishedAt == other.FinishedAt || FinishedAt != null && FinishedAt.Equals(other.FinishedAt));

    var linksMatch =
        (Embedded == other.Embedded || Embedded != null && Embedded.Equals(other.Embedded)) &&
        (Links == other.Links || Links != null && Links.Equals(other.Links));

    return idsMatch && stateMatches && timestampsMatch && linksMatch;
}
/// <summary>
/// Processes one pipe message: either finalizes the load files (on the
/// "PleaseFinalize" label) or writes load-file entries for the documents in
/// the export collection, forwarding any per-document writer logs.
/// </summary>
/// <param name="envelope">Envelope whose Body is an ExportDocumentCollection.</param>
protected override void ProcessMessage(PipeMessageEnvelope envelope)
{
    if (envelope.Label == "PleaseFinalize")
    {
        FinalizeFiles();
        return;
    }

    try
    {
        // FIX (consistency): the cast and precondition assertions now run inside the
        // try block, matching the sibling export workers, so their failures are also
        // traced and reported to the director instead of escaping unreported.
        exportDocumentCollection = (ExportDocumentCollection)envelope.Body;

        #region Assertion
        //Pre Condition
        PipelineId.ShouldNotBeEmpty();
        BootParameters.ShouldNotBe(null);
        BootParameters.ShouldBeTypeOf <string>();
        exportDocumentCollection.ShouldNotBe(null);
        exportDocumentCollection.Documents.ShouldNotBe(null);
        exportDocumentCollection.Documents.LongCount().ShouldBeGreaterThan(0);
        #endregion

        exportDocumentCollection.Dataset = _dataset;

        // Lazily initialize on the first message.
        if (parametersExportLoadFile == null)
        {
            InitializeForProcessing(BootParameters);
        }

        GetDocumentFields(exportDocumentCollection.Documents);

        #region Get Content-Field value from Text file
        GetDocumentsContentField();
        #endregion

        if (exportDocumentCollection.ExportOption.IsImage || exportDocumentCollection.ExportOption.IsProduction)
        {
            //Set Images File Path..
            var loadFileHelper = new ExportLoadFileHelper(BootParameters);
            Parallel.ForEach(exportDocumentCollection.Documents,
                new ParallelOptions { MaxDegreeOfParallelism = _maxParallelThread },
                (docDetail) => loadFileHelper.SetImageSourceFiles(docDetail, exportDocumentCollection.ExportOption));
        }

        var fileWriterLogList = WriteLoadFiles();

        #region Send Log
        if (fileWriterLogList != null && fileWriterLogList.Any())
        {
            //Send to Log pipe
            SendLog(fileWriterLogList);
        }
        #endregion

        documentContentFieldsValueCollection.Clear();
    }
    catch (Exception ex)
    {
        // Report and swallow so one bad message does not kill the worker.
        ex.Trace().Swallow();
        ReportToDirector(ex);
    }
}
/// <summary>
/// Adds the uploaded content as a format of an existing document descriptor.
/// The descriptor is resolved either from a document handle or from a queue
/// job id, depending on the custom data accompanying the upload.
/// </summary>
/// <param name="tenantId">Tenant owning the document.</param>
/// <param name="format">Requested format; the literal "null" asks the server
/// to deduce the format from the uploaded file name.</param>
/// <returns>200 with an AddFormatToDocumentResponse on success; 400 with an
/// error message (possibly empty) on any validation failure.</returns>
public async Task <HttpResponseMessage> AddFormatToDocument(TenantId tenantId, DocumentFormat format)
{
    var errorMessage = await AddFormatFromHttpContent(Request.Content, format);
    Logger.DebugFormat("File {0} processed with message {1}", _blobId, errorMessage);
    if (errorMessage != null)
    {
        Logger.Error("Error Adding format To Document: " + errorMessage);
        return Request.CreateErrorResponse(HttpStatusCode.BadRequest, errorMessage);
    }

    String queueName = _customData[AddFormatToDocumentParameters.QueueName] as String;
    String jobId = _customData[AddFormatToDocumentParameters.JobId] as String;
    DocumentDescriptorId documentId;
    if (String.IsNullOrEmpty(queueName))
    {
        //user ask for handle, we need to grab the handle
        var documentHandle = new DocumentHandle(_customData[AddFormatToDocumentParameters.DocumentHandle] as String);
        var handle = _handleWriter.FindOneById(documentHandle);
        documentId = handle.DocumentDescriptorId;
        if (documentId == null)
        {
            Logger.ErrorFormat("Trying to add a format for Handle {0} with a null DocumentId", documentHandle);
            return Request.CreateErrorResponse(HttpStatusCode.BadRequest, "");
        }
        Logger.DebugFormat("Add format {0} to handle {1} and document id {2}", format, handle, documentId);
    }
    else
    {
        var job = _queueDispatcher.GetJob(queueName, jobId);
        if (job == null)
        {
            Logger.WarnFormat("Job id {0} not found in queue {1}", jobId, queueName);
            return Request.CreateErrorResponse(
                HttpStatusCode.BadRequest,
                String.Format("Job id {0} not found in queue {1}", jobId, queueName));
        }
        documentId = job.DocumentDescriptorId;
        if (documentId == null)
        {
            Logger.ErrorFormat("Trying to add a format for Job Id {0} queue {1} - Job has DocumentDescriptorId null", jobId, queueName);
            return Request.CreateErrorResponse(HttpStatusCode.BadRequest, "");
        }
        //need to check if the descriptor is deleted
        // FIX (idiom): Any(predicate) instead of Where(...).Any().
        var exists = _documentDescriptorReader
            .AllUnsorted
            .Any(d => d.Id == documentId);
        if (!exists)
        {
            // FIX: corrected log-message grammar ("does not exists" -> "does not exist").
            Logger.ErrorFormat("Trying to add a format for Job Id {0} queue {1} - DocumentDescriptor does not exist or was deleted!", jobId, queueName);
            return Request.CreateErrorResponse(HttpStatusCode.BadRequest, "");
        }
        Logger.DebugFormat("Add format {0} to job id {1} and document id {2}", format, job.Id, documentId);
    }

    if (format == "null")
    {
        // Caller did not specify a format: deduce it from the file name.
        var formatFromFileName = _documentFormatTranslator.GetFormatFromFileName(_fileName);
        if (formatFromFileName == null)
        {
            String error = "Format not specified and no known format for file: " + _fileName;
            Logger.Error(error);
            return Request.CreateErrorResponse(HttpStatusCode.BadRequest, error);
        }
        format = new DocumentFormat(formatFromFileName);
    }

    var createdById = new PipelineId(_customData[AddFormatToDocumentParameters.CreatedBy] as String);
    Logger.DebugFormat("Incoming new format for documentId {0}", documentId);
    var command = new AddFormatToDocumentDescriptor(documentId, format, _blobId, createdById);
    CommandBus.Send(command, "api");

    return Request.CreateResponse(
        HttpStatusCode.OK,
        new AddFormatToDocumentResponse
        {
            Result = true,
        });
}
private int _documentsRetrievalbatchSize; //value set by configuration

#endregion

#endregion

#region OverDrive

/// <summary>
/// Startup for the export worker: validates boot parameters, reads the export
/// batch sizes from configuration, initializes the load file and computes the
/// total number of documents to export (failing the job when none are found).
/// </summary>
protected override void BeginWork()
{
    base.BeginWork();

    #region Assertion
    //Pre Condition
    BootParameters.ShouldNotBe(null);
    BootParameters.ShouldBeTypeOf <string>();
    PipelineId.ShouldNotBeEmpty();
    #endregion

    // Batch sizes come from the "Export" configuration section.
    _batchSize = Convert.ToInt32(ApplicationConfigurationManager.GetValue("ExportBatchSize", "Export"));
    _documentsRetrievalbatchSize = Convert.ToInt32(ApplicationConfigurationManager.GetValue("ExportGetDocumentsBatchSize", "Export"));

    #region Load File
    Initialize(BootParameters);
    #endregion

    #region DCB - Restored When DCB Export is migrated to Overdrive
    /* else if (PipelineType == PipelineType.ExportDcb)
     * {
     *     _parametersExportDCB = GetExportBEO<ExportDCBJobDetailBEO>((string)BootParameters);
     *     if (_parametersExportDCB != null)
     *     {
     *         //dataset
     *         if (!string.IsNullOrEmpty(_parametersExportDCB.DatasetId) && !string.IsNullOrEmpty(_parametersExportDCB.MatterId))
     *         {
     *             _dataset = GetDatasetDetails(Convert.ToInt64(_parametersExportDCB.DatasetId), _parametersExportDCB.MatterId);
     *         }
     *         #region ExportOption Option
     *         _exportOption = new ExportOption();
     *         //Native
     *         if (_parametersExportDCB.ExportDCBFileInfo.ExportDCBFileOption.IncludeNativeFiles)
     *             _exportOption.IsNative = true;
     *
     *         //Production/Image
     *         if (_parametersExportDCB.ExportDCBFileInfo.PriImgSelection == SetSelection.ProductionSet)
     *         {
     *             _exportOption.IsProduction = true;
     *             _exportOption.ProductionSetCollectionId = _parametersExportDCB.ExportDCBFileInfo.ProdImgCollectionId;
     *         }
     *         else if (_parametersExportDCB.ExportDCBFileInfo.PriImgSelection == SetSelection.ImageSet)
     *         {
     *             _exportOption.IsImage = true;
     *             _exportOption.ImageSetCollectionId = _parametersExportDCB.ExportDCBFileInfo.ProdImgCollectionId;
     *         }
     *
     *         //Fields
     *         if (_parametersExportDCB.ExportDCBFields.ExportDCBFields != null && _parametersExportDCB.ExportDCBFields.ExportDCBFields.Count > 0)
     *             _exportOption.IsField = true;
     *
     *         //Tag
     *         if (_parametersExportDCB.ExportDCBTagInfo != null && _parametersExportDCB.ExportDCBTagInfo.IncludeTag)
     *             _exportOption.IsTag = true;
     *
     *         //Comments
     *         if (_parametersExportDCB.ExportDCBTagInfo != null && (_parametersExportDCB.ExportDCBTagInfo.IncludeTextDocumentComment || _parametersExportDCB.ExportDCBTagInfo.IncludeTextLevelComment))
     *             _exportOption.IsComments = true;
     *
     *         if (!string.IsNullOrEmpty(_parametersExportDCB.ExportDCBFileInfo.FilePath))
     *             _exportOption.ExportDestinationFolderPath = _parametersExportDCB.ExportDCBFileInfo.FilePath;
     *         #endregion
     *         #region Set User
     *         if (!string.IsNullOrEmpty(_parametersExportDCB.CreatedBy))
     *             _createdBy = _parametersExportDCB.CreatedBy;
     *         else
     *         {
     *             Tracer.Error("ExportOption Startup Worker: Job created by user id not specified in boot parameters for job run id:{0}", PipelineId);
     *             //TODO: throw appropriate exception after analysis.
     *         }
     *         MockSession();
     *         #endregion
     *         #region Construct Search Query
     *         _searchQuery = GetSearchQueryForExportDCB(out _reviewsetId, out _isIncludeConceptSearch);
     *         #endregion
     *     }
     * }*/
    #endregion

    try
    {
        _totalDocumentCount = SetTotalDocumentsCount();
    }
    catch (Exception ex)
    {
        Tracer.Error(
            "ExportOption Startup Worker: On beginWork failed to set total documents count for job run id:{0}, exception:{1}",
            PipelineId, ex);
        LogMessage(false, Constants.FailureInSearch);
        throw;
    }

    // An export over zero documents is a job-level failure, not a no-op.
    if (_totalDocumentCount <= 0)
    {
        Tracer.Error("ExportOption Startup Worker: Search return empty records for job run id:{0}", PipelineId);
        LogMessage(false, Constants.ExportSearchNoRecords);
        CleanFileResources();
        throw new EVException().AddUsrMsg(Constants.ExportSearchNoRecords);
    }
}