public void verify_id_is_opaque_and_not_contains_blob_id()
 {
     var info = new QueueInfo("test", "", "docx");
     QueueHandler sut = new QueueHandler(info, _db);
     StreamReadModel rm = new StreamReadModel()
     {
         Filename = new FileNameWithExtension("test.docx"),
         FormatInfo = new FormatInfo()
         {
             DocumentFormat = new DocumentFormat("thumb.small"),
             BlobId = new BlobId("blob.1"),
             PipelineId = new PipelineId("thumbnail"),
         },
         DocumentDescriptorId = new DocumentDescriptorId(1),
         Handle = new DocumentHandle("Revision_2"),
     };
     sut.Handle(rm, new TenantId("test_tenant"));
     var collection = _db.GetCollection<QueuedJob>("queue.test");
     Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
     var job = collection.AsQueryable().Single();
     Assert.That(job.BlobId, Is.EqualTo(new BlobId("blob.1")));
     Assert.That(job.TenantId, Is.EqualTo(new TenantId("test_tenant")));
     Assert.That(job.DocumentDescriptorId, Is.EqualTo(new DocumentDescriptorId(1)));
      Assert.That(job.Handle.ToString(), Is.EqualTo(rm.Handle.ToString()));
      Assert.That(job.Id.ToString(), Is.Not.Contains("blob.1"), "Id should not contain internal concepts like the blob id");
      Assert.That(job.Id.ToString(), Is.Not.Contains("tenant"), "Id should not contain internal concepts like the tenant id");
     Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.BlobId));
     Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.DocumentId));
 }
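 //A queue configured for "pdf|doc" must not pick up a .docx file: the extension filter matches whole extensions only.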
 public void verify_file_extension_on_handler_filter_exact_extension()
 {
     var info = new QueueInfo("test", "", "pdf|doc");
     QueueHandler sut = new QueueHandler(info, _db);
     StreamReadModel rm = new StreamReadModel()
     {
         Filename = new FileNameWithExtension("test.docx")
     };
     sut.Handle(rm, new TenantId("test"));
     var collection = _db.GetCollection<QueuedJob>("queue.test");
     Assert.That(collection.AsQueryable().Count(), Is.EqualTo(0));
 }
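 //Quartz-based monitor for queued jobs: it keeps the poller job manager, the configured queue handlers and the store configuration; Logger defaults to NullLogger.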
 public QueuedJobQuartzMonitor(
     IPollerJobManager pollerJobManager,
     QueueHandler[] queueHandlers,
     DocumentStoreConfiguration config)
 {
     _pollerJobManager = pollerJobManager;
     _queueHandlers = queueHandlers;
     _config = config;
   
      //NOTE: this list is built locally and never assigned to a field, so it
      //currently has no effect on the monitor (see TODO below).
      new List<String>() 
     {
         //@@TODO: Multiple bindings?
         config.GetServerAddressForJobs()
     };
     Logger = NullLogger.Instance;
 }
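 //The queued job must expose the mime type of the source file through its parameters.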
 public void verify_job_parameters_contains_mime_type()
 {
     var info = new QueueInfo("test", "", "docx");
     QueueHandler sut = new QueueHandler(info, _db);
     StreamReadModel rm = new StreamReadModel()
     {
         Filename = new FileNameWithExtension("test.docx"),
         FormatInfo = new FormatInfo()
         {
             DocumentFormat = new DocumentFormat("thumb.small"),
             BlobId = new BlobId("blob.1"),
             PipelineId = new PipelineId("thumbnail"),
         },
         DocumentDescriptorId = new DocumentDescriptorId(1),
     };
     sut.Handle(rm, new TenantId("test_tenant"));
     var collection = _db.GetCollection<QueuedJob>("queue.test");
     Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
     var job = collection.AsQueryable().Single();
     Assert.That(job.BlobId, Is.EqualTo(new BlobId("blob.1")));
     Assert.That(job.Parameters[JobKeys.MimeType], Is.EqualTo(MimeTypes.GetMimeTypeByExtension("docx")));
 }   
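 //Helper that builds a DocumentHasNewFormat event with an incrementing blob id and routes it through the handler under test.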
 private static void HandleStreamToCreateJob(
     QueueHandler sut, 
     String tenant = "test", 
     Dictionary<String, Object> customData = null)
 {
     StreamReadModel rm = new StreamReadModel()
     {
         Filename = new FileNameWithExtension("test.docx"),
         EventType = HandleStreamEventTypes.DocumentHasNewFormat,
         FormatInfo = new FormatInfo()
         {
             PipelineId = new PipelineId("tika"),
             DocumentFormat = new DocumentFormat("tika"),
             BlobId = new BlobId("tika." + lastBlobId++)
         },
         DocumentCustomData = new DocumentCustomData(customData ?? new Dictionary<String,Object>()),
     };
     sut.Handle(rm, new TenantId(tenant));
 }
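         //Custom data attached to the handle must be copied into the queued job's HandleCustomData.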
        public void verify_job_created_with_handle_metadata()
        {
            var info = new QueueInfo("test", "", "pdf|docx");
            QueueHandler sut = new QueueHandler(info, _db);
            var customData = new DocumentCustomData() 
                {
                    {"test" , "value"},
                    {"complex" , 42},
                };
            StreamReadModel rm = new StreamReadModel()
            {
                Id = 1L,
                Handle = "FirstHandle",
                Filename = new FileNameWithExtension("test.docx"),
                EventType = HandleStreamEventTypes.DocumentHasNewFormat,
                FormatInfo = new FormatInfo()
                {
                    PipelineId = new PipelineId("soffice"),
                    DocumentFormat = new DocumentFormat("office"),
                    BlobId = new BlobId("soffice.1")
                },
                DocumentDescriptorId = new DocumentDescriptorId(1),
                DocumentCustomData = customData,
            };

            sut.Handle(rm, new TenantId("test"));

            var collection = _db.GetCollection<QueuedJob>("queue.test");
            Assert.That(collection.AsQueryable().Single().HandleCustomData, Is.EquivalentTo(customData));
        }
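         //A handle reassigned to a different document that points to the same blob must not schedule a second job.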
         public void verify_job_is_not_duplicated_when_handle_changes_but_blob_is_the_same()
        {
            var info = new QueueInfo("test", "", "pdf|docx");
            QueueHandler sut = new QueueHandler(info, _db);
            StreamReadModel rm = new StreamReadModel()
            {
                Id = 1L,
                Handle = "FirstHandle",
                Filename = new FileNameWithExtension("test.docx"),
                EventType = HandleStreamEventTypes.DocumentHasNewFormat,
                FormatInfo = new FormatInfo()
                {
                    PipelineId = new PipelineId("soffice"),
                    DocumentFormat = new DocumentFormat("office"),
                    BlobId = new BlobId("soffice.1")
                },
                DocumentDescriptorId = new DocumentDescriptorId(1),
            };

            sut.Handle(rm, new TenantId("test"));
            rm.Handle = "SecondHandle";
            rm.Id = 2L;
             //This is the typical de-duplication scenario: the handle is reassigned
             //to another document, but the underlying blob id is the same.
            sut.Handle(rm, new TenantId("test"));
            var collection = _db.GetCollection<QueuedJob>("queue.test");
            //no need to schedule another job
            Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
        }
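         //Only events coming from the configured pipeline ("tika") may queue a job; other pipelines are filtered out.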
        public void verify_pipeline_id_filter()
        {
            var info = new QueueInfo("test", "tika", "");
            QueueHandler sut = new QueueHandler(info, _db);
            StreamReadModel rm = new StreamReadModel()
            {
                Filename = new FileNameWithExtension("test.docx"),
                EventType = HandleStreamEventTypes.DocumentHasNewFormat,
                FormatInfo = new FormatInfo()
                {
                    PipelineId = new PipelineId("soffice")
                }
            };
            sut.Handle(rm, new TenantId("test"));
            var collection = _db.GetCollection<QueuedJob>("queue.test");
             Assert.That(collection.AsQueryable().Count(), Is.EqualTo(0), "pipeline filter did not filter out the unwanted pipeline");

            rm = new StreamReadModel()
            {
                Filename = new FileNameWithExtension("test.docx"),
                FormatInfo = new FormatInfo()
                {
                    PipelineId = new PipelineId("tika"),
                    DocumentFormat = new DocumentFormat("tika"),
                    BlobId = new BlobId("tika.1")
                }
            };
            sut.Handle(rm, new TenantId("test"));

             Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1), "pipeline filter did not let the admitted pipeline through");
         }
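         //With a mime type filter configured, only files whose extension maps to that mime type are queued.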
        public void verify_filtering_on_mime_types()
        {
            var mimeTypeDocx = MimeTypes.GetMimeTypeByExtension("docx");
            var info = new QueueInfo("test", mimeTypes: mimeTypeDocx);
            QueueHandler sut = new QueueHandler(info, _db);
            StreamReadModel rm = new StreamReadModel()
            {
                Filename = new FileNameWithExtension("test.pdf"),
                FormatInfo = new FormatInfo()
                {
                    DocumentFormat = new DocumentFormat("thumb.small"),
                    BlobId = new BlobId("blob.1"),
                    PipelineId = new PipelineId("thumbnail")
                }
            };
            sut.Handle(rm, new TenantId("test"));
            var collection = _db.GetCollection<QueuedJob>("queue.test");
            Assert.That(collection.AsQueryable().Count(), Is.EqualTo(0));

            rm = new StreamReadModel()
            {
                Filename = new FileNameWithExtension("test.docx"),
                FormatInfo = new FormatInfo()
                {
                    DocumentFormat = new DocumentFormat("thumb.small"),
                    BlobId = new BlobId("blob.1"),
                    PipelineId = new PipelineId("thumbnail")
                }
            };
            sut.Handle(rm, new TenantId("test"));

            Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
        }
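 //Keeps the queue handlers whose queued-job status is reported; Logger defaults to NullLogger.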
 public QueuedJobStatus(
     QueueHandler[] queueHandlers)
 {
     _queueHandlers = queueHandlers;
     Logger = NullLogger.Instance;
 }
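 //A minimal sketch (not the original fixture) of the state the tests above assume:
 //an IMongoDatabase field named _db pointing at a throw-away test database, and the
 //counter used by HandleStreamToCreateJob to build unique blob ids. The connection
 //string and database name are illustrative only.
 private IMongoDatabase _db;
 private static Int32 lastBlobId = 1;

 [SetUp]
 public void SetUp()
 {
     //Drop and recreate the test database so every test starts from an empty queue.
     var url = new MongoUrl("mongodb://localhost/queue-handler-tests");
     var client = new MongoClient(url);
     client.DropDatabase(url.DatabaseName);
     _db = client.GetDatabase(url.DatabaseName);
 }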