/// <summary>
/// Two stream events that share the same blob id (the handle was
/// de-duplicated onto another document) must produce a single job.
/// </summary>
public void verify_get_next_job_set_identity()
{
    var queueInfo = new QueueInfo("test", "", "pdf|docx");
    var handler = new QueueHandler(queueInfo, _db);
    var readModel = new StreamReadModel()
    {
        Id = 1L,
        Handle = "FirstHandle",
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice"),
            DocumentFormat = new DocumentFormat("office"),
            BlobId = new BlobId("soffice.1")
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
    };
    handler.Handle(readModel, new TenantId("test"));

    //Typical de-duplication scenario: a different handle and stream id,
    //but the underlying blob id is unchanged.
    readModel.Handle = "SecondHandle";
    readModel.Id = 2L;
    handler.Handle(readModel, new TenantId("test"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    //no second job should have been scheduled
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1));
}
/// <summary>
/// Custom data attached to the handle must be copied onto the queued job.
/// </summary>
public void verify_job_created_with_handle_metadata()
{
    var queueInfo = new QueueInfo("test", "", "pdf|docx");
    var handler = new QueueHandler(queueInfo, _db);
    var expectedCustomData = new DocumentCustomData()
    {
        { "test", "value" },
        { "complex", 42 },
    };
    var readModel = new StreamReadModel()
    {
        Id = 1L,
        Handle = "FirstHandle",
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice"),
            DocumentFormat = new DocumentFormat("office"),
            BlobId = new BlobId("soffice.1")
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
        DocumentCustomData = expectedCustomData,
    };

    handler.Handle(readModel, new TenantId("test"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Single().HandleCustomData, Is.EquivalentTo(expectedCustomData));
}
/// <summary>
/// Determines whether a job should be queued for the given stream element,
/// based on the queue's configured extensions, mime types, formats and pipeline.
/// </summary>
/// <param name="streamElement">Stream event to evaluate.</param>
/// <returns>
/// True only when at least one filter is configured and the element
/// satisfies every configured filter.
/// </returns>
internal bool ShouldCreateJob(StreamReadModel streamElement)
{
    //if extensions or mime type are present we need to check belonging in one of them
    if (_splittedExtensions.Length > 0 || _splittedMimeTypes.Length > 0)
    {
        if (!_splittedExtensions.Contains(streamElement.Filename.Extension)
            && !_splittedMimeTypes.Contains(Core.MimeTypes.GetMimeType(streamElement.Filename)))
        {
            return (false);
        }
    }

    //FIX: FormatInfo can be null (events that carry only a file name are created
    //elsewhere in this file); guard it so a format- or pipeline-filtered queue
    //returns false instead of throwing NullReferenceException.
    var documentFormat = streamElement.FormatInfo?.DocumentFormat?.ToString();
    if (_splittedFormats.Length > 0 &&
        !_splittedFormats.Contains(documentFormat))
    {
        return (false);
    }

    var pipelineId = streamElement.FormatInfo?.PipelineId;
    if (!String.IsNullOrEmpty(Pipeline) &&
        pipelineId != null &&
        !Regex.IsMatch(pipelineId, Pipeline))
    {
        return (false);
    }

    //Return true only if at least one filtering condition is configured:
    //a queue with no filters is manual-execution only.
    return (_splittedExtensions.Length > 0 ||
            _splittedFormats.Length > 0 ||
            _splittedMimeTypes.Length > 0 ||
            !String.IsNullOrEmpty(Pipeline));
}
/// <summary>
/// A queue configured with a pipeline filter must reject events from
/// other pipelines and accept events from the configured one.
/// </summary>
public void verify_pipeline_id_filter()
{
    var queueInfo = new QueueInfo("test", "tika", "");
    var handler = new QueueHandler(queueInfo, _db);

    //event produced by a non-matching pipeline: should be filtered out
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(0), "pipeline filter is not filtering out unwanted pipeline");

    //event produced by the configured pipeline: should be queued
    readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("tika"),
            DocumentFormat = new DocumentFormat("tika"),
            BlobId = new BlobId("tika.1")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1), "pipeline filter is not filtering in admitted pipeline");
}
/// <summary>
/// The generated job id must be opaque: it should not leak internal
/// concepts such as the blob id or the tenant id, and the job parameters
/// must not expose blob/document ids either.
/// </summary>
public void verify_id_is_opaque_and_not_contains_blob_id()
{
    var info = new QueueInfo("test", "", "docx");
    QueueHandler sut = new QueueHandler(info, _db);
    StreamReadModel rm = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail"),
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
        Handle = new DocumentHandle("Revision_2"),
    };

    sut.Handle(rm, new TenantId("test_tenant"));

    var collection = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
    var job = collection.AsQueryable().Single();
    Assert.That(job.BlobId, Is.EqualTo(new BlobId("blob.1")));
    Assert.That(job.TenantId, Is.EqualTo(new TenantId("test_tenant")));
    Assert.That(job.DocumentDescriptorId, Is.EqualTo(new DocumentDescriptorId(1)));
    Assert.That(job.Handle.ToString(), Is.EqualTo(rm.Handle));
    //FIX: corrected misspelled failure messages ("concempts" -> "concepts", "contains" -> "contain")
    Assert.That(job.Id.ToString(), Is.Not.Contains("blob.1"), "Id should not contain internal concepts like blob id");
    Assert.That(job.Id.ToString(), Is.Not.Contains("tenant"), "Id should not contain internal concepts like tenant id");
    Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.BlobId));
    Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.DocumentId));
}
/// <summary>
/// Every queued job must carry the mime type of the source file
/// among its parameters.
/// </summary>
public void verify_job_parameters_contains_mime_type()
{
    var queueInfo = new QueueInfo("test", "", "docx");
    var handler = new QueueHandler(queueInfo, _db);
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail"),
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
    };

    handler.Handle(readModel, new TenantId("test_tenant"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1));
    var job = jobs.AsQueryable().Single();
    Assert.That(job.BlobId, Is.EqualTo(new BlobId("blob.1")));
    Assert.That(job.Parameters[JobKeys.MimeType], Is.EqualTo(MimeTypes.GetMimeTypeByExtension("docx")));
}
/// <summary>
/// The generated job id must be opaque: it should not leak internal
/// concepts such as the blob id or the tenant id, and the job parameters
/// must not expose blob/document ids either.
/// </summary>
public void verify_id_is_opaque_and_not_contains_blob_id()
{
    var info = new QueueInfo("test", "", "docx");
    QueueHandler sut = new QueueHandler(info, _db);
    StreamReadModel rm = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail"),
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
        Handle = new DocumentHandle("Revision_2"),
    };

    sut.Handle(rm, new TenantId("test_tenant"));

    var collection = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
    var job = collection.AsQueryable().Single();
    Assert.That(job.BlobId, Is.EqualTo(new BlobId("blob.1")));
    Assert.That(job.TenantId, Is.EqualTo(new TenantId("test_tenant")));
    Assert.That(job.DocumentDescriptorId, Is.EqualTo(new DocumentDescriptorId(1)));
    Assert.That(job.Handle.ToString(), Is.EqualTo(rm.Handle));
    //FIX: corrected misspelled failure messages ("concempts" -> "concepts", "contains" -> "contain")
    Assert.That(job.Id.ToString(), Is.Not.Contains("blob.1"), "Id should not contain internal concepts like blob id");
    Assert.That(job.Id.ToString(), Is.Not.Contains("tenant"), "Id should not contain internal concepts like tenant id");
    Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.BlobId));
    Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.DocumentId));
}
/// <summary>
/// Parametrized check of <see cref="QueueInfo.ShouldCreateJob"/> for
/// combinations of mime-type and extension filters against a file name.
/// </summary>
public void verify_should_create_job_mime_extension(String mimetypes, String extensions, String fileName, Boolean expected)
{
    var queueInfo = new QueueInfo("TEST", extensions: extensions, mimeTypes: mimetypes);
    var streamReadModel = new StreamReadModel
    {
        Filename = new Core.Model.FileNameWithExtension(fileName)
    };

    Assert.That(queueInfo.ShouldCreateJob(streamReadModel), Is.EqualTo(expected));
}
/// <summary>
/// A queue configured with a mime-type filter must reject files of other
/// mime types and accept matching ones.
/// </summary>
public void verify_filtering_on_mime_types()
{
    var docxMimeType = MimeTypes.GetMimeTypeByExtension("docx");
    var queueInfo = new QueueInfo("test", mimeTypes: docxMimeType);
    var handler = new QueueHandler(queueInfo, _db);

    //a pdf does not match the docx mime type: no job expected
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.pdf"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(0));

    //a docx matches: one job expected
    readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1));
}
/// <summary>
/// Parametrized check of <see cref="QueueInfo.ShouldCreateJob"/> for
/// combinations of mime-type and extension filters against a file name.
/// </summary>
public void Verify_should_create_job_mime_extension(String mimetypes, String extensions, String fileName, Boolean expected)
{
    var queueInfo = new QueueInfo("TEST", extensions: extensions, mimeTypes: mimetypes);
    var streamReadModel = new StreamReadModel
    {
        Filename = new Core.Model.FileNameWithExtension(fileName)
    };

    Assert.That(queueInfo.ShouldCreateJob(streamReadModel), Is.EqualTo(expected));
}
/// <summary>
/// Extension filtering is exact: "docx" must not match a queue that
/// only admits "pdf" and "doc".
/// </summary>
public void verify_file_extension_on_handler_filter_exact_extension()
{
    var queueInfo = new QueueInfo("test", "", "pdf|doc");
    var handler = new QueueHandler(queueInfo, _db);
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx")
    };

    handler.Handle(readModel, new TenantId("test"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(0));
}
/// <summary>
/// Parametrized cases where <see cref="QueueInfo.ShouldCreateJob"/> must
/// return false: the queue is for manual execution only.
/// </summary>
public void queue_with_only_manual_execution(
    String mimetypes,
    String extensions,
    String pipeline,
    String fileName)
{
    var queueInfo = new QueueInfo("TEST", extensions: extensions, mimeTypes: mimetypes, pipeline: pipeline);
    var streamReadModel = new StreamReadModel
    {
        Filename = new Core.Model.FileNameWithExtension(fileName)
    };

    Assert.That(queueInfo.ShouldCreateJob(streamReadModel), Is.EqualTo(false));
}
/// <summary>
/// Extension filtering is exact: "docx" must not match a queue that
/// only admits "pdf" and "doc".
/// </summary>
public void verify_file_extension_on_handler_filter_exact_extension()
{
    var queueInfo = new QueueInfo("test", "", "pdf|doc");
    var handler = new QueueHandler(queueInfo, _db);
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx")
    };

    handler.Handle(readModel, new TenantId("test"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(0));
}
/// <summary>
/// Parametrized cases where <see cref="QueueInfo.ShouldCreateJob"/> must
/// return false: the queue is for manual execution only.
/// </summary>
public void Queue_with_only_manual_execution(
    String mimetypes,
    String extensions,
    String pipeline,
    String fileName)
{
    var queueInfo = new QueueInfo("TEST", extensions: extensions, mimeTypes: mimetypes, pipeline: pipeline);
    var streamReadModel = new StreamReadModel
    {
        Filename = new Core.Model.FileNameWithExtension(fileName)
    };

    Assert.That(queueInfo.ShouldCreateJob(streamReadModel), Is.EqualTo(false));
}
/// <summary>
/// Handle a <see cref="StreamReadModel" /> and generates job for the queue
/// if needed. Jobs are de-duplicated on (BlobId, TenantId) unless
/// <paramref name="forceReSchedule"/> is true.
/// </summary>
/// <param name="streamElement">Stream event that may trigger job creation.</param>
/// <param name="tenantId">Tenant owning the stream element.</param>
/// <param name="forceReSchedule">When true, skips the duplicate-job check.</param>
public void Handle(
    StreamReadModel streamElement,
    TenantId tenantId,
    Boolean forceReSchedule = false)
{
    //the queue's configured filters decide whether this element needs a job at all
    if (_info.ShouldCreateJob(streamElement))
    {
        if (!forceReSchedule)
        {
            //look for already existing job with the same blobid, there is no need to re-queue again
            //because if a job with the same blobid was already fired for this queue there is no need
            //to re-issue
            var existing = _collection.Find(
                Builders<QueuedJob>.Filter.And(
                    Builders<QueuedJob>.Filter.Eq(j => j.BlobId, streamElement.FormatInfo.BlobId),
                    Builders<QueuedJob>.Filter.Eq(j => j.TenantId, tenantId)
                )
            ).Count() > 0;
            if (existing) return;
        }
        if (Logger.IsDebugEnabled) Logger.DebugFormat("Create queue for readmodel stream id {0} and queue {1}", streamElement.Id, _info.Name);

        //build the job: the id is an opaque GUID, deliberately unrelated to blob or tenant
        QueuedJob job = new QueuedJob();
        var id = new QueuedJobId(Guid.NewGuid().ToString());
        job.Id = id;
        //NOTE(review): local time, not UTC — confirm consumers expect server-local timestamps
        job.SchedulingTimestamp = DateTime.Now;
        job.StreamId = streamElement.Id;
        job.TenantId = tenantId;
        job.DocumentDescriptorId = streamElement.DocumentDescriptorId;
        job.BlobId = streamElement.FormatInfo.BlobId;
        job.Handle = new DocumentHandle(streamElement.Handle);
        //job parameters carry everything an executor needs without exposing internal ids
        job.Parameters = new Dictionary<string, string>();
        job.Parameters.Add(JobKeys.FileExtension, streamElement.Filename.Extension);
        job.Parameters.Add(JobKeys.Format, streamElement.FormatInfo.DocumentFormat);
        job.Parameters.Add(JobKeys.FileName, streamElement.Filename);
        job.Parameters.Add(JobKeys.TenantId, tenantId);
        job.Parameters.Add(JobKeys.MimeType, MimeTypes.GetMimeType(streamElement.Filename));
        job.HandleCustomData = streamElement.DocumentCustomData;
        //queue-level configuration parameters are copied onto every job
        if (_info.Parameters != null)
        {
            foreach (var parameter in _info.Parameters)
            {
                job.Parameters.Add(parameter.Key, parameter.Value);
            }
        }
        _collection.InsertOne(job);
    }
}
/// <summary>
/// Builds the feed projection of a <see cref="StreamReadModel"/>,
/// copying identifiers and deriving the mime type from the file name.
/// </summary>
public FeedForStreamReadModel(StreamReadModel original)
{
    Id = original.Id;
    Handle = original.Handle;
    EventType = original.EventType;
    EventTypeDesc = original.EventType.ToString();

    var formatInfo = original.FormatInfo;
    if (formatInfo != null)
    {
        DocumentFormat = formatInfo.DocumentFormat;
    }

    var sourceFileName = original.Filename;
    if (sourceFileName != null)
    {
        FileName = sourceFileName;
        MimeType = MimeTypes.GetMimeType(sourceFileName);
    }
}
/// <summary>
/// Test helper: feeds the handler a stream event for the "tika" pipeline,
/// using a fresh blob id each call so de-duplication never kicks in.
/// </summary>
private static void HandleStreamToCreateJob(
    QueueHandler sut,
    String tenant = "test",
    Dictionary<String, Object> customData = null)
{
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("tika"),
            DocumentFormat = new DocumentFormat("tika"),
            BlobId = new BlobId("tika." + lastBlobId++)
        },
        DocumentCustomData = new DocumentCustomData(customData ?? new Dictionary<String, Object>()),
    };

    sut.Handle(readModel, new TenantId(tenant));
}
/// <summary>
/// Projects a new-attachment event into the stream read model,
/// linking it to the descriptor that owns the attachment.
/// </summary>
public void On(DocumentDescriptorHasNewAttachment e)
{
    //locate the descriptor that contains the attachment; without it there is nothing to project
    var owningDescriptor = _documentDescriptorReadModel
        .AllUnsorted
        .SingleOrDefault(d => d.Documents.Contains(e.Attachment));
    if (owningDescriptor == null)
    {
        return;
    }

    var projected = new StreamReadModel()
    {
        Id = GetNewId(),
        Handle = e.Attachment,
        DocumentDescriptorId = owningDescriptor.Id,
        EventType = HandleStreamEventTypes.DocumentHasNewAttachment,
    };
    projected.AddEventData(StreamReadModelEventDataKeys.ChildHandle, e.Attachment);

    _streamReadModelCollection.Insert(e, projected);
}
/// <summary>
/// A file whose extension is in the queue's admitted list must
/// produce exactly one job.
/// </summary>
public void Verify_file_extension_permitted()
{
    var queueInfo = new QueueInfo("test", "", "pdf|docx");
    var handler = new QueueHandler(queueInfo, _db);
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice"),
            DocumentFormat = new DocumentFormat("office"),
            BlobId = new BlobId("soffice.1")
        }
    };

    handler.Handle(readModel, new TenantId("test"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1));
}
/// <summary>
/// Every queued job must carry the mime type of the source file
/// among its parameters.
/// </summary>
public void verify_job_parameters_contains_mime_type()
{
    var queueInfo = new QueueInfo("test", "", "docx");
    var handler = new QueueHandler(queueInfo, _db);
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail"),
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
    };

    handler.Handle(readModel, new TenantId("test_tenant"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1));
    var job = jobs.AsQueryable().Single();
    Assert.That(job.BlobId, Is.EqualTo(new BlobId("blob.1")));
    Assert.That(job.Parameters[JobKeys.MimeType], Is.EqualTo(MimeTypes.GetMimeTypeByExtension("docx")));
}
/// <summary>
/// Determines whether a job should be queued for the given stream element,
/// based on the queue's configured extensions, mime types, formats and pipeline.
/// </summary>
/// <param name="streamElement">Stream event to evaluate.</param>
/// <returns>
/// True only when at least one filter is configured and the element
/// satisfies every configured filter.
/// </returns>
internal bool ShouldCreateJob(StreamReadModel streamElement)
{
    //if extensions or mime type are present we need to check belonging in one of them
    if (_splittedExtensions.Length > 0 || _splittedMimeTypes.Length > 0)
    {
        if (!_splittedExtensions.Contains(streamElement.Filename.Extension)
            && !_splittedMimeTypes.Contains(Core.MimeTypes.GetMimeType(streamElement.Filename)))
        {
            return false;
        }
    }

    //FIX: FormatInfo can be null (events that carry only a file name are created
    //elsewhere in this file); guard it so a format- or pipeline-filtered queue
    //returns false instead of throwing NullReferenceException.
    var documentFormat = streamElement.FormatInfo?.DocumentFormat?.ToString();
    if (_splittedFormats.Length > 0 && !_splittedFormats.Contains(documentFormat))
        return false;

    var pipelineId = streamElement.FormatInfo?.PipelineId;
    if (!String.IsNullOrEmpty(Pipeline)
        && pipelineId != null
        && !Regex.IsMatch(pipelineId, Pipeline))
        return false;

    //Return true only if at least one filtering condition is configured:
    //a queue with no filters is manual-execution only.
    return _splittedExtensions.Length > 0
        || _splittedFormats.Length > 0
        || _splittedMimeTypes.Length > 0
        || !String.IsNullOrEmpty(Pipeline);
}
/// <summary>
/// Handle a <see cref="StreamReadModel" /> and generates job for the queue
/// if needed. Jobs are de-duplicated on (BlobId, TenantId) unless
/// <paramref name="forceReSchedule"/> is true.
/// </summary>
/// <param name="streamElement">Stream event that may trigger job creation.</param>
/// <param name="tenantId">Tenant owning the stream element.</param>
/// <param name="forceReSchedule">When true, skips the duplicate-job check and marks the job with the Force parameter.</param>
/// <returns>The id of the newly created job, or null when no job was created.</returns>
public QueuedJobId Handle(
    StreamReadModel streamElement,
    TenantId tenantId,
    Boolean forceReSchedule = false)
{
    //the queue's configured filters decide whether this element needs a job at all
    if (_info.ShouldCreateJob(streamElement))
    {
        if (!forceReSchedule)
        {
            //look for already existing job with the same blobid, there is no need to re-queue again
            //because if a job with the same blobid was already fired for this queue there is no need
            //to re-issue
            var existing = _collection.Find(
                Builders<QueuedJob>.Filter.And(
                    Builders<QueuedJob>.Filter.Eq(j => j.BlobId, streamElement.FormatInfo.BlobId),
                    Builders<QueuedJob>.Filter.Eq(j => j.TenantId, tenantId)
                )
            ).Count() > 0;
            if (existing)
            {
                return (null);
            }
        }
        if (Logger.IsInfoEnabled)
        {
            Logger.Info($"Queue {_info.Name} CREATE JOB to process {streamElement.Describe()}");
        }

        //build the job: the id is an opaque GUID, deliberately unrelated to blob or tenant
        QueuedJob job = new QueuedJob();
        job.Id = new QueuedJobId(Guid.NewGuid().ToString());
        //NOTE(review): local time, not UTC — confirm consumers expect server-local timestamps
        job.SchedulingTimestamp = DateTime.Now;
        job.StreamId = streamElement.Id;
        job.TenantId = tenantId;
        job.DocumentDescriptorId = streamElement.DocumentDescriptorId;
        job.BlobId = streamElement.FormatInfo.BlobId;
        job.Handle = new DocumentHandle(streamElement.Handle);
        //job parameters carry everything an executor needs without exposing internal ids
        job.Parameters = new Dictionary<string, string>();
        job.Parameters.Add(JobKeys.FileExtension, streamElement.Filename.Extension);
        job.Parameters.Add(JobKeys.Format, streamElement.FormatInfo.DocumentFormat);
        job.Parameters.Add(JobKeys.FileName, streamElement.Filename);
        job.Parameters.Add(JobKeys.TenantId, tenantId);
        job.Parameters.Add(JobKeys.MimeType, MimeTypes.GetMimeType(streamElement.Filename));
        job.Parameters.Add(JobKeys.PipelineId, streamElement.FormatInfo?.PipelineId?.ToString());
        if (forceReSchedule)
        {
            //mark the job so downstream executors can tell this run was explicitly forced
            job.Parameters.Add(JobKeys.Force, "true");
        }
        job.HandleCustomData = streamElement.DocumentCustomData;
        //queue-level configuration parameters are copied onto every job
        if (_info.Parameters != null)
        {
            foreach (var parameter in _info.Parameters)
            {
                job.Parameters.Add(parameter.Key, parameter.Value);
            }
        }
        _collection.InsertOne(job);
        return (job.Id);
    }
    else
    {
        if (Logger.IsDebugEnabled)
        {
            Logger.Debug($"Queue {_info.Name} do not need to process {streamElement.Describe()}");
        }
    }
    return (null);
}
/// <summary>
/// Test helper: feeds the handler a stream event for the "tika" pipeline,
/// using a fresh blob id each call so de-duplication never kicks in.
/// </summary>
private static void HandleStreamToCreateJob(
    QueueHandler sut,
    String tenant = "test",
    Dictionary<String, Object> customData = null)
{
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("tika"),
            DocumentFormat = new DocumentFormat("tika"),
            BlobId = new BlobId("tika." + lastBlobId++)
        },
        DocumentCustomData = new DocumentCustomData(customData ?? new Dictionary<String, Object>()),
    };

    sut.Handle(readModel, new TenantId(tenant));
}
/// <summary>
/// Custom data attached to the handle must be copied onto the queued job.
/// </summary>
public void verify_job_created_with_handle_metadata()
{
    var queueInfo = new QueueInfo("test", "", "pdf|docx");
    var handler = new QueueHandler(queueInfo, _db);
    var expectedCustomData = new DocumentCustomData()
    {
        { "test", "value" },
        { "complex", 42 },
    };
    var readModel = new StreamReadModel()
    {
        Id = 1L,
        Handle = "FirstHandle",
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice"),
            DocumentFormat = new DocumentFormat("office"),
            BlobId = new BlobId("soffice.1")
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
        DocumentCustomData = expectedCustomData,
    };

    handler.Handle(readModel, new TenantId("test"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Single().HandleCustomData, Is.EquivalentTo(expectedCustomData));
}
/// <summary>
/// Two stream events that share the same blob id (the handle was
/// de-duplicated onto another document) must produce a single job.
/// </summary>
public void verify_get_next_job_set_identity()
{
    var queueInfo = new QueueInfo("test", "", "pdf|docx");
    var handler = new QueueHandler(queueInfo, _db);
    var readModel = new StreamReadModel()
    {
        Id = 1L,
        Handle = "FirstHandle",
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice"),
            DocumentFormat = new DocumentFormat("office"),
            BlobId = new BlobId("soffice.1")
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
    };
    handler.Handle(readModel, new TenantId("test"));

    //Typical de-duplication scenario: a different handle and stream id,
    //but the underlying blob id is unchanged.
    readModel.Handle = "SecondHandle";
    readModel.Id = 2L;
    handler.Handle(readModel, new TenantId("test"));

    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    //no second job should have been scheduled
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1));
}
/// <summary>
/// A queue configured with a pipeline filter must reject events from
/// other pipelines and accept events from the configured one.
/// </summary>
public void verify_pipeline_id_filter()
{
    var queueInfo = new QueueInfo("test", "tika", "");
    var handler = new QueueHandler(queueInfo, _db);

    //event produced by a non-matching pipeline: should be filtered out
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(0), "pipeline filter is not filtering out unwanted pipeline");

    //event produced by the configured pipeline: should be queued
    readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("tika"),
            DocumentFormat = new DocumentFormat("tika"),
            BlobId = new BlobId("tika.1")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1), "pipeline filter is not filtering in admitted pipeline");
}
/// <summary>
/// A queue configured with a mime-type filter must reject files of other
/// mime types and accept matching ones.
/// </summary>
public void verify_filtering_on_mime_types()
{
    var docxMimeType = MimeTypes.GetMimeTypeByExtension("docx");
    var queueInfo = new QueueInfo("test", mimeTypes: docxMimeType);
    var handler = new QueueHandler(queueInfo, _db);

    //a pdf does not match the docx mime type: no job expected
    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.pdf"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    var jobs = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(0));

    //a docx matches: one job expected
    readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail")
        }
    };
    handler.Handle(readModel, new TenantId("test"));
    Assert.That(jobs.AsQueryable().Count(), Is.EqualTo(1));
}