/// <summary>
/// Test double for the payment-creation handler. Adds no behavior of its own:
/// every dependency is forwarded unchanged to the base class constructor.
/// </summary>
public CreatePaymentMock( IOptions <ConnectionStringsSettings> connectionStringsOptions, IOptions <QueuesSettings> queuesOptionsSettings, Get.GetPayment getPayment, IMapper mapper, QueueHandler queueHandler) : base(connectionStringsOptions, queuesOptionsSettings, getPayment, mapper, queueHandler) { }
// Exercises QueueHandler<string> with 4 concurrent producers of 10 items each
// and asserts that every item reaches the consumer callback.
public void Test_QueueHandler_4_threads_test()
{
    // Items seen by the consumer callback. List<T> is not thread-safe and the
    // handler callback may run on worker threads, so every access is guarded
    // by `gate` (the original added to the list unsynchronized — a data race).
    List<string> result = new List<string>();
    object gate = new object();
    var queueHandler = new QueueHandler<string>((p, token) =>
    {
        Thread.Sleep(10); // simulate per-item processing cost
        lock (gate)
        {
            result.Add(p);
        }
    });

    // Producer: enqueues 10 items, reading QueueSize to exercise it concurrently.
    Action addAction = () =>
    {
        var someValue = 0;
        for (int i = 0; i < 10; ++i)
        {
            queueHandler.AddItem(i.ToString());
            someValue += queueHandler.QueueSize;
        }
    };

    Thread[] threads = new Thread[4];
    // Array.Length instead of the LINQ Count() extension — the original
    // needlessly enumerated the array on every loop iteration.
    for (int i = 0; i < threads.Length; ++i)
        threads[i] = new Thread(() => addAction());
    foreach (var thread in threads)
        thread.Start();

    // Give the handler time to drain the queue.
    // NOTE(review): a fixed sleep is timing-fragile; polling with a timeout would be sturdier.
    Thread.Sleep(2000);

    lock (gate)
    {
        Assert.Equal(40, result.Count); // 4 producers * 10 items
    }
}
// Entry point: subscribes an anonymous method (cast to the QueueHandler
// delegate type declared elsewhere) to the Queue event; the handler just
// prints "OK". NOTE(review): nothing here raises the event — presumably it
// fires elsewhere in the program; confirm.
static void Main(string[] args) { Queue += (QueueHandler) delegate { System.Console.WriteLine("OK"); }; }
/// <summary>
/// Reads the current queue value from a fresh QueueHandler and shows it in a
/// message box.
/// </summary>
/// <param name="a">Unused; kept so the signature stays compatible with existing callers.</param>
/// <returns>A completed task — the work here is entirely synchronous.</returns>
public Task GetQueuAsync(int a)
{
    // The original was declared `async` but contained no awaits (compiler
    // warning CS1998). Returning Task.CompletedTask keeps the Task-returning
    // contract without the needless async state machine.
    QueueHandler queuHandler = new QueueHandler();
    var queuValue = queuHandler.GetQueue();
    MessageBox.Show(queuValue);
    return Task.CompletedTask;
}
// The queued job's Id must be opaque: it must not embed internal concepts
// (blob id, tenant id), and the job parameters must not expose the
// BlobId/DocumentId keys either.
public void verify_id_is_opaque_and_not_contains_blob_id()
{
    var info = new QueueInfo("test", "", "docx");
    QueueHandler sut = new QueueHandler(info, _db);
    StreamReadModel rm = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail"),
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
        Handle = new DocumentHandle("Revision_2"),
    };
    sut.Handle(rm, new TenantId("test_tenant"));
    // Exactly one job must have been queued for the handled stream.
    var collection = _db.GetCollection <QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
    var job = collection.AsQueryable().Single();
    // The job carries over the source data…
    Assert.That(job.BlobId, Is.EqualTo(new BlobId("blob.1")));
    Assert.That(job.TenantId, Is.EqualTo(new TenantId("test_tenant")));
    Assert.That(job.DocumentDescriptorId, Is.EqualTo(new DocumentDescriptorId(1)));
    Assert.That(job.Handle.ToString(), Is.EqualTo(rm.Handle));
    // …but its Id must not leak blob/tenant internals.
    // ("concempts" in the assertion messages is a typo for "concepts";
    // left untouched because messages are runtime strings.)
    Assert.That(job.Id.ToString(), Is.Not.Contains("blob.1"), "Id should not contains internal concempts like blob id");
    Assert.That(job.Id.ToString(), Is.Not.Contains("tenant"), "Id should not contains internal concempts like tenant id");
    Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.BlobId));
    Assert.That(job.Parameters.Keys, Is.Not.Contains(JobKeys.DocumentId));
}
// Unity lifecycle: runs before the first frame update.
// Copies the matching card's internal values onto this component, registers
// them with the queue's running totals, and wires up the (initially disabled)
// return-to-hand button.
void Start()
{
    Queue = GameObject.Find("Queue").GetComponent<QueueHandler>();

    // Sync costs/values with the backing card.
    APCost = matchingCard.APCost;
    Value = matchingCard.Value;
    CardType = matchingCard.CardType;

    if (Queue != null)
    {
        Queue.CurrentAP += APCost;
        switch (CardType)
        {
            case Card.Slot.Defend:
                Queue.CurrentDefenseVal += Value;
                break;
            case Card.Slot.Attack:
            case Card.Slot.Magic:
                Queue.CurrentAttackVal += Value;
                break;
        }
    }

    returnButton = GetComponent<Button>();
    returnButton.enabled = false; // enabled later, once returning is allowed
    returnButton.onClick.AddListener(ReturnToHand);
}
// Regression test: when the same blob is re-handled under a different handle
// (handle de-duplication), no second job must be scheduled on the queue.
public void verify_get_next_job_set_identity()
{
    var queueInfo = new QueueInfo("test", "", "pdf|docx");
    var sut = new QueueHandler(queueInfo, _db);

    var readModel = new StreamReadModel()
    {
        Id = 1L,
        Handle = "FirstHandle",
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice"),
            DocumentFormat = new DocumentFormat("office"),
            BlobId = new BlobId("soffice.1")
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
    };
    sut.Handle(readModel, new TenantId("test"));

    // Typical de-duplication scenario: the handle is reassigned to another
    // document, but the underlying blob id stays the same.
    readModel.Handle = "SecondHandle";
    readModel.Id = 2L;
    sut.Handle(readModel, new TenantId("test"));

    // Same blob -> no need to schedule another job.
    var collection = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
}
// Verifies GetNextJob honours job-property filters: a non-matching file
// extension yields no job, while a matching one yields exactly that job.
public void Verify_job_filter_by_job_properties()
{
    var queueInfo = new QueueInfo("test", "tika", "");
    var sut = GetSut(queueInfo);
    var xlsxJobId = HandleStreamToCreateJob(sut, fileName: "pippo.xlsx");
    var docxJobId = HandleStreamToCreateJob(sut, fileName: "pippo.docx");

    // No queued job carries a pptx extension -> nothing is returned.
    var job = sut.GetNextJob(
        "identity",
        "handle",
        null,
        new Dictionary<string, object>() { { "file_ext", "pptx" } });
    Assert.That(job, Is.Null);

    // The docx filter must select the docx job.
    job = sut.GetNextJob(
        "identity",
        "handle",
        null,
        new Dictionary<string, object>() { { "file_ext", "docx" } });
    Assert.That(job, Is.Not.Null);
    Assert.That(job.Id, Is.EqualTo(docxJobId));
}
// Verifies the mime-type filter: a pdf must be rejected by a queue configured
// for the docx mime type, while a docx must be queued.
public void verify_filtering_on_mime_types()
{
    var docxMimeType = MimeTypes.GetMimeTypeByExtension("docx");
    var queueInfo = new QueueInfo("test", mimeTypes: docxMimeType);
    var sut = new QueueHandler(queueInfo, _db);

    var pdfModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.pdf"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail")
        }
    };
    sut.Handle(pdfModel, new TenantId("test"));

    // pdf does not match the docx mime type: nothing gets queued.
    var collection = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(0));

    var docxModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail")
        }
    };
    sut.Handle(docxModel, new TenantId("test"));

    // docx matches: exactly one job queued.
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
}
// Verifies the pipeline filter: events from the "soffice" pipeline must be
// ignored by a queue bound to "tika", while "tika" events must be queued.
public void verify_pipeline_id_filter()
{
    var queueInfo = new QueueInfo("test", "tika", "");
    var sut = new QueueHandler(queueInfo, _db);

    var sofficeModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice")
        }
    };
    sut.Handle(sofficeModel, new TenantId("test"));

    // Wrong pipeline -> nothing queued.
    var collection = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(0), "pipeline filter is not filtering out unwanted pipeline");

    var tikaModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("tika"),
            DocumentFormat = new DocumentFormat("tika"),
            BlobId = new BlobId("tika.1")
        }
    };
    sut.Handle(tikaModel, new TenantId("test"));

    // Admitted pipeline -> exactly one job queued.
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1), "pipeline filter is not filtering in admitted pipeline");
}
/// <summary>
/// Receives _numberOfMessages messages from the queue, logging a short
/// preview (first 50 characters) of each body, then reports the elapsed time
/// and releases the queue.
/// </summary>
public void Execute()
{
    int messagesRetrieved = 0;
    QueueHandler.InitializeQueue();
    DateTime startTime = DateTime.Now;
    Console.WriteLine($"Starting to retrieve {_numberOfMessages} messages to queue.");
    while (messagesRetrieved < _numberOfMessages)
    {
        Message message = QueueHandler.Receive();
        Console.WriteLine($"Starting to receive message {messagesRetrieved + 1}: {message.Id}");
        string bodyString = (string)message.Body;
        // Preview at most the first 50 characters of the body.
        // (The original took Substring(0, 49) — an off-by-one against its own
        // 50-character check.)
        if (bodyString.Length > 50)
        {
            bodyString = bodyString.Substring(0, 50);
        }
        Console.WriteLine($"Message: {bodyString}...");
        messagesRetrieved++;
    }
    // Fixed message: this path receives messages, it does not upload them
    // (the original text was copy-pasted from the sender counterpart).
    Console.WriteLine($"Retrieved all {_numberOfMessages} messages from the queue.");
    DateTime endTime = DateTime.Now;
    Console.WriteLine($"Started {startTime} and Ended {endTime}. Duration = {endTime - startTime}");
    QueueHandler.CloseAndDispose();
}
// The queued job's parameters must include the mime type derived from the
// file extension of the handled stream.
public void verify_job_parameters_contains_mime_type()
{
    var queueInfo = new QueueInfo("test", "", "docx");
    var sut = new QueueHandler(queueInfo, _db);

    var readModel = new StreamReadModel()
    {
        Filename = new FileNameWithExtension("test.docx"),
        FormatInfo = new FormatInfo()
        {
            DocumentFormat = new DocumentFormat("thumb.small"),
            BlobId = new BlobId("blob.1"),
            PipelineId = new PipelineId("thumbnail"),
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
    };
    sut.Handle(readModel, new TenantId("test_tenant"));

    // Exactly one job queued, carrying the source blob and the docx mime type.
    var collection = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Count(), Is.EqualTo(1));
    var queuedJob = collection.AsQueryable().Single();
    Assert.That(queuedJob.BlobId, Is.EqualTo(new BlobId("blob.1")));
    Assert.That(queuedJob.Parameters[JobKeys.MimeType], Is.EqualTo(MimeTypes.GetMimeTypeByExtension("docx")));
}
// Custom data attached to the document handle must be copied verbatim onto
// the queued job's HandleCustomData.
public void verify_job_created_with_handle_metadata()
{
    var queueInfo = new QueueInfo("test", "", "pdf|docx");
    var sut = new QueueHandler(queueInfo, _db);
    var expectedCustomData = new DocumentCustomData()
    {
        { "test", "value" },
        { "complex", 42 },
    };
    var readModel = new StreamReadModel()
    {
        Id = 1L,
        Handle = "FirstHandle",
        Filename = new FileNameWithExtension("test.docx"),
        EventType = HandleStreamEventTypes.DocumentHasNewFormat,
        FormatInfo = new FormatInfo()
        {
            PipelineId = new PipelineId("soffice"),
            DocumentFormat = new DocumentFormat("office"),
            BlobId = new BlobId("soffice.1")
        },
        DocumentDescriptorId = new DocumentDescriptorId(1),
        DocumentCustomData = expectedCustomData,
    };
    sut.Handle(readModel, new TenantId("test"));

    var collection = _db.GetCollection<QueuedJob>("queue.test");
    Assert.That(collection.AsQueryable().Single().HandleCustomData, Is.EquivalentTo(expectedCustomData));
}
// Verifies MaxNumberOfFailure: after the configured number of failed
// executions (2) the job must no longer be handed out and must be stored as
// Failed with its error details.
// (Method name keeps the historical "falure" typo for compatibility.)
public void verify_max_number_of_falure()
{
    var queueInfo = new QueueInfo("test", "tika", "");
    queueInfo.MaxNumberOfFailure = 2;
    var sut = CreateAGenericJob(queueInfo);

    // First failure.
    var job = sut.GetNextJob("", "handle", null, null);
    Assert.That(job, Is.Not.Null);
    var jobId = job.Id;
    sut.SetJobExecuted(job.Id, "Error 42", null);

    // Second failure.
    job = sut.GetNextJob("", "handle", null, null);
    Assert.That(job, Is.Not.Null);
    sut.SetJobExecuted(job.Id, "Error 42", null);

    // The failure budget is exhausted now.
    job = sut.GetNextJob("", "handle", null, null);
    Assert.That(job, Is.Null, "After two failure the job should not be returned anymore");

    var collection = _db.GetCollection<QueuedJob>("queue.test");
    var storedJob = collection.Find(Builders<QueuedJob>.Filter.Eq(j => j.Id, jobId)).SingleOrDefault();
    Assert.That(storedJob.ExecutionError, Is.EqualTo("Error 42"));
    Assert.That(storedJob.ErrorCount, Is.EqualTo(2));
    Assert.That(storedJob.Status, Is.EqualTo(QueuedJobExecutionStatus.Failed));
}
// Console driver for the QueueHandler "market" queue: reads whitespace-
// separated commands from stdin (Append/Insert/Find/Serve) until "End",
// then prints the accumulated buffer and exits.
public static void Main()
{
    QueueHandler tMarket = new QueueHandler();
    while (true)
    {
        // NOTE(review): "/t" (forward slash + t) looks like a typo for the tab
        // escape "\t", which is already in the separator list — confirm
        // whether a literal "/t" separator is really expected in the input.
        string[] attribs = Console.ReadLine().Split(new string[] {" ", "\t", "/t" }, StringSplitOptions.RemoveEmptyEntries);
        switch (attribs[0])
        {
            case "Append": // Append <name>
                tMarket.Append(attribs[1]);
                break;
            case "Insert": // Insert <position> <name>
                tMarket.Insert(int.Parse(attribs[1]), attribs[2]);
                break;
            case "Find": // Find <name>
                tMarket.Find(attribs[1]);
                break;
            case "Serve": // Serve <count>
                tMarket.Serve(int.Parse(attribs[1]));
                break;
            case "End": // dump accumulated output and terminate
                Console.WriteLine(tMarket.Buffer.ToString());
                return;
            default: // unrecognized commands are silently ignored
                break;
        }
    }
}
/// <summary>
/// Wires up the customer-creation handler: unwraps the connection-string and
/// queue options and stores the mapper and queue handler for later use.
/// </summary>
public CreateCustomer(IOptions <ConnectionStringsSettings> connectionStringsOptions, IOptions <QueuesSettings> queuesOptionsSettings, IMapper mapper, QueueHandler queueHandler)
{
    _connectionStringsSettings = connectionStringsOptions.Value;
    _queuesSettings = queuesOptionsSettings.Value;
    _mapper = mapper;
    _queueHandler = queueHandler;
}
// Verifies GetNextJob filtering on handle custom data: an unknown key
// matches nothing, while each known "foo" value returns the matching job.
public void verify_job_filter_by_custom_properties()
{
    var sut = CreateAGenericJob(
        new QueueInfo("test", "tika", ""),
        customData: new Dictionary<String, Object>()
        {
            ["foo"] = 6,
            ["bar"] = "test",
        });
    HandleStreamToCreateJob(sut, customData: new Dictionary<String, Object>()
    {
        ["foo"] = 42,
        ["bar"] = "the ultimate answer",
    });

    // No queued job carries this key.
    var job = sut.GetNextJob("identity", "handle", null, new Dictionary<string, Object>() { ["notexisting"] = 11 });
    Assert.That(job, Is.Null);

    // foo == 42 selects the second job.
    job = sut.GetNextJob("identity", "handle", null, new Dictionary<string, Object>() { ["foo"] = 42 });
    Assert.That(job.HandleCustomData["bar"], Is.EqualTo("the ultimate answer"));

    // foo == 6 selects the first job.
    job = sut.GetNextJob("identity", "handle", null, new Dictionary<string, Object>() { ["foo"] = 6 });
    Assert.That(job.HandleCustomData["bar"], Is.EqualTo("test"));
}
// Builds a queue handler for `info` and seeds it with one generic job
// (optionally under a specific tenant / with custom data) so tests can pull
// the job back out via GetNextJob.
private QueueHandler CreateAGenericJob(QueueInfo info, String tenant = "test", Dictionary <String, Object> customData = null)
{
    var sut = GetSut(info);
    HandleStreamToCreateJob(sut, tenant, customData);
    return sut;
}
// Uploads _numberOfMessages messages to the queue (5-minute TTL, journaling
// off, persistence per settings), then reports the total elapsed time and
// releases the queue.
public void Execute()
{
    QueueHandler.InitializeQueue();
    DateTime startTime = DateTime.Now;
    Console.WriteLine($"Starting to upload {_numberOfMessages} messages to queue.");
    for (int sentCount = 1; sentCount <= _numberOfMessages; sentCount++)
    {
        var message = new Message
        {
            Body = SettingsProvider.GetMessageBody(),
            Label = "Sent from MsmqTester",
            TimeToReachQueue = new TimeSpan(0, 5, 0), // give up after 5 minutes
            UseDeadLetterQueue = true,
            Recoverable = SettingsProvider.GetWillPersist(),
            UseJournalQueue = false
        };
        QueueHandler.SendMessage(message);
        Console.WriteLine($"Uploaded message {sentCount}");
    }
    Console.WriteLine($"Uploaded all {_numberOfMessages} to the queue.");
    DateTime endTime = DateTime.Now;
    Console.WriteLine($"Started {startTime} and Ended {endTime}. Duration = {endTime - startTime}");
    QueueHandler.CloseAndDispose();
}
// QueueHandler.For must build a handler specialized for the supplied queue's
// message type.
public void ForCreatesHandlerForGivenQueue()
{
    var queueMock = new Mock<IAzureQueue<MessageStub>>();

    var handler = QueueHandler.For(queueMock.Object);

    Assert.IsInstanceOfType(handler, typeof(QueueHandler<MessageStub>));
}
/// <summary>
/// Test double for the customer-creation handler. Adds no behavior of its
/// own: every dependency is forwarded unchanged to the base class constructor.
/// </summary>
public CreateCustomerMock( IOptions <ConnectionStringsSettings> connectionStringsOptions, IOptions <QueuesSettings> queuesOptionsSettings, IMapper mapper, QueueHandler queueHandler) : base(connectionStringsOptions, queuesOptionsSettings, mapper, queueHandler) { }
/// <summary>
/// Wires up the payment-creation handler: unwraps the connection-string and
/// queue options and stores the payment query, mapper, and queue handler.
/// </summary>
public CreatePayment(IOptions <ConnectionStringsSettings> connectionStringsOptions, IOptions <QueuesSettings> queuesOptionsSettings, GetPayment.GetPayment getPayment, IMapper mapper, QueueHandler queueHandler)
{
    _connectionStringsSettings = connectionStringsOptions.Value;
    _queuesSettings = queuesOptionsSettings.Value;
    _getPayment = getPayment;
    _mapper = mapper;
    _queueHandler = queueHandler;
}
// A job that is currently executing must not be handed out a second time.
public void verify_get_next_job_not_give_executing_job()
{
    var sut = CreateAGenericJob(new QueueInfo("test", "tika", ""));

    // The first call takes the only job…
    var firstJob = sut.GetNextJob("", "handle", null, null);
    Assert.That(firstJob, Is.Not.Null);

    // …so the second call must come back empty while it is still executing.
    var secondJob = sut.GetNextJob("", "handle", null, null);
    Assert.That(secondJob, Is.Null);
}
// Dequeuing a job must record the identity of the caller that took it.
// NOTE(review): the method name mentions duplicate-blob protection, but the
// assertion only covers ExecutingIdentity — confirm the intended coverage.
public void verify_not_duplicate_jobs_on_same_blob_id()
{
    var sut = CreateAGenericJob(new QueueInfo("test", "tika", ""));
    var dequeuedJob = sut.GetNextJob("identity", "handle", null, null);

    var collection = _db.GetCollection<QueuedJob>("queue.test");
    var storedJob = collection.Find(Builders<QueuedJob>.Filter.Eq(j => j.Id, dequeuedJob.Id)).SingleOrDefault();
    Assert.That(storedJob.ExecutingIdentity, Is.EqualTo("identity"));
}
// Bootstraps the background services: touches the QueueHandler singleton
// (its side effect configures the queue) and then starts the remoting server.
// (Comments translated from Spanish.)
private void SetUpServices()
{
    QueueHandler.GetInstance(); // configures the queue
    StartRemotingServer();      // start the remoting server
}
// Console entry point: runs the queue handler until the operator presses a key.
public static void Main(string[] args)
{
    var handler = new QueueHandler();
    handler.Start();
    Console.ReadKey(); // block here until a key is pressed
    handler.Stop();
}
// Builds a channel bound to the given AMQP protocol: records the channel id,
// starts in the closed state, keeps the manager's close callback, and creates
// the per-channel method handlers (exchange, queue, basic).
internal RabbitMQDefaultChannel(RabbitMQProtocol protocol, ushort id, Action <ushort> closeCallback) : base(protocol)
{
    _channelId = id;
    _protocol = protocol;
    _isOpen = false; // the channel must be opened explicitly before use
    _managerCloseCallback = closeCallback; // notifies the connection manager on close
    _exchangeMethodHandler = new ExchangeHandler(_channelId, _protocol);
    _queueMethodHandler = new QueueHandler(_channelId, _protocol);
    _basicHandler = new BasicHandler(_channelId, _protocol);
}
// Spins up a QueueHandler on "MyQueue" with the supplied message handler
// (single- or multi-threaded), waits (up to 6s) for it to report idle, and
// disposes it on the way out.
private void ExecuteHandler(Action <object, CancellationToken> messageHandler, bool multiThreadedHandler)
{
    using (var handler = new QueueHandler(NullLogger.Instance, _queueFactory, _taskFactory))
    {
        handler.Start("MyQueue", true, LocaleQueueMode.TemporaryMaster, true, messageHandler, null, null, 100, multiThreadedHandler, false, _cancellationToken);
        // ReSharper disable once AccessToDisposedClosure
        TimerUtility.WaitForIt(() => handler.Idle, 6000);
    }
}
// Base constructor for list-style user controls: builds the UI, prepares the
// empty search state, focuses the search box once it attaches to the visual
// tree, and creates the queue handler that refreshes the entity list.
protected ListsUserControl()
{
    // ReSharper disable once VirtualMemberCallInConstructor
    InitializeComponent();
    SetupGuiControls();
    _searchText = string.Empty;
    SearchBox.AttachedToVisualTree += (s, e) => SearchBox.Focus(); // Seems to take time (translated from Swedish)
    QueueHandler = new QueueHandler(UpdateEntityList);
}
// Creates the SQS client from the configured AWS options, wraps it in a
// QueueHandler, and subscribes to the queue, caching handler and
// subscription result in static fields.
private async static Task InitialiseQueueAsync()
{
    LogEvent(LoggingLevel.Info, $"Initialising Queue.");
    var awsOptions = ConfigurationHandler.Configuration.GetAWSOptions();
    var sqsClient = awsOptions.CreateServiceClient <IAmazonSQS>();
    _queueHandler = new QueueHandler(sqsClient);
    _query = await _queueHandler.SubscribeToQueue();
}
//NEED TO INCORPORATE OF THE GAME LOGIC, SHOULD BE IN HERE
// Unity lifecycle: caches the component references (queue, deck, UI,
// hand) and seeds the starting combat stats for player and enemy.
private void Start()
{
    queueLogic = queue.GetComponent <QueueHandler>();
    deckLogic = deck.GetComponent <Deck>();
    messagerLogic = messager.GetComponent <UIController>();
    handLogic = hand.GetComponent <Hand>();
    // Starting stats — presumably placeholder balance values; confirm.
    PlayerMaxHealth = 20;
    PlayerHealth = PlayerMaxHealth;
    EnemyMaxHealth = 5;
    EnemyHealth = EnemyMaxHealth;
    EnemyDamage = 5;
}
/// <summary>
/// Instantiate the queue providers: for each configured provider, load its
/// assembly, reflectively create its handler class, verify and initialize it,
/// and register it by provider name (tracking the default provider's handler
/// separately).
/// </summary>
private void LoadQueueProviders()
{
    foreach (QueueProvider provider in _queueProviders.Values)
    {
        // Per-iteration state is scoped inside the loop.
        var assemblyName = new AssemblyName(provider.AssemblyName);
        Assembly assembly = Assembly.Load(assemblyName);
        var queueHandler = assembly.CreateInstance(
            provider.ClassName,
            false,
            BindingFlags.Public | BindingFlags.Instance | BindingFlags.CreateInstance,
            null,
            new object[] { provider },
            null,
            null) as QueueHandler;

        // CreateInstance returns null (via the failed `as` cast) when the
        // class is missing or of the wrong type; such providers are skipped.
        if (queueHandler != null)
        {
            queueHandler.VerifyConfiguration();
            queueHandler.Initialize();
            _queueHandlers[provider.Name] = queueHandler;
            // Remember the handler belonging to the configured default provider.
            if (provider == _defaultQueueProvider)
                _defaultQueueHandler = queueHandler;
        }
    }
}
// Displays the records pending upload (the indexing queue) in the form's
// list view and enables the upload button. Any database error is surfaced in
// a message box instead of crashing the form.
// (Comments translated from Russian.)
private void ShowQueuelist(QueueHandler queueHandler)
{
    // List of records pending upload
    try
    {
        List<QueueStructure> IndexingQueue = queueHandler.GetIndexingQueue();
        // Display it on the form
        QueuelistView.Items.Clear();
        foreach (QueueStructure queueRecord in IndexingQueue)
        {
            //index = checkedListBoxQueue.Items.Add(queueRecord, true);
            var newItem = new ListViewItem();
            newItem.SubItems.Add(queueRecord.SOURCE_NAME.ToString());
            newItem.SubItems.Add(queueRecord.XML_DESCRIPTION.ToString());
            newItem.SubItems.Add(""); //result (filled in later)
            newItem.Tag = queueRecord; // keep the record for the upload step
            QueuelistView.Items.Add(newItem);
        }
        buttonUpload.Enabled = true;
        // Adjust the column widths
        // QueuelistView.AutoResizeColumns(ColumnHeaderAutoResizeStyle.HeaderSize);
    }
    catch (Exception e)
    {
        MessageBox.Show(e.Message, "DB connection", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
// Entry point: subscribes an anonymous method (cast to the QueueHandler
// delegate type declared elsewhere) to the Queue event; the handler just
// prints "OK". NOTE(review): nothing here raises the event — presumably it
// fires elsewhere in the program; confirm.
public static void Main (string[] args) { Queue += (QueueHandler) delegate { System.Console.WriteLine ("OK"); }; }