public void Return_should_not_return_channel_to_the_pool_if_closed()
{
    // Arrange: a channel that reports itself as closed must never be pooled again.
    var closedChannel = NSubstitute.Substitute.For<IModel>();
    closedChannel.IsClosed.Returns(true);

    var queueReferences = new QueueReferences("lorem", "ipsum", "dolor", "amet", "sic");

    var busConnection = NSubstitute.Substitute.For<IBusConnection>();
    busConnection.CreateChannel().Returns(closedChannel);

    var sut = new PublisherChannelContextPool(busConnection);
    var context = new PublisherChannelContext(closedChannel, queueReferences, sut);

    // Get() declares the exchange on the freshly created channel.
    sut.Get(queueReferences);
    closedChannel.Received(1).ExchangeDeclare(queueReferences.ExchangeName, type: ExchangeType.Topic);
    closedChannel.ClearReceivedCalls();

    // Act: returning a closed channel should drop it instead of pooling it.
    sut.Return(context);

    // Assert
    sut.GetAvailableCount().Should().Be(0);
}
/// <summary>
/// Pushes a message that failed processing onto the dead-letter queue, attaching the
/// original exception message as an error header. Best-effort: failures are logged, never thrown.
/// </summary>
private async Task PublishToDLQAsync(IMessage message, QueueReferences queueReferences, Exception ex, CancellationToken cancellationToken)
{
    // No dead-letter destination configured for this queue: nothing to do.
    if (string.IsNullOrWhiteSpace(queueReferences.DeadLetterTopicName))
        return;

    try
    {
        _logger.LogWarning("pushing message '{MessageId}' to DLQ '{DeadLetterQueue}' ...",
            message.Id, queueReferences.DeadLetterTopicName);

        var errorHeaders = new[]
        {
            new Header(HeaderNames.Error, Encoding.UTF8.GetBytes(ex.Message))
        };

        await _publisher.PublishAsync(message,
            queueReferences.DeadLetterTopicName,
            additionalHeaders: errorHeaders,
            cancellationToken: cancellationToken);
    }
    catch (Exception dlqEx)
    {
        // Publishing to the DLQ must never take down the consumer; log and move on.
        _logger.LogWarning(dlqEx, "an exception has occurred while publishing message '{MessageId}' to DLQ '{DeadLetterQueue}': {Exception}",
            message.Id, queueReferences.DeadLetterTopicName, dlqEx.Message);
    }
}
public async Task StartAsync_should_not_republish_to_deadletter_when_exception_occurs_and_no_deadletter_available()
{
    // Arrange: empty dead-letter topic name means no DLQ is configured.
    var consumeResult = new ConsumeResult<Guid, byte[]>();
    var parsedMessage = NSubstitute.Substitute.For<IMessage>();
    var queueReferences = new QueueReferences("lorem", "");

    var parser = NSubstitute.Substitute.For<IMessageParser>();
    parser.Parse(consumeResult).Returns(parsedMessage);

    // Processing blows up, which would normally trigger the DLQ path.
    var processor = NSubstitute.Substitute.For<IMessageProcessor>();
    processor.WhenForAnyArgs(mp => mp.ProcessAsync((dynamic)parsedMessage))
        .Throw(new Exception("whoops"));

    var publisher = NSubstitute.Substitute.For<IKafkaPublisherExecutor>();
    var logger = NSubstitute.Substitute.For<ILogger<KafkaMessageHandler>>();
    var systemInfo = SystemInfo.New();

    var sut = new KafkaMessageHandler(parser, processor, publisher, logger, systemInfo);

    // Act
    await sut.HandleAsync(consumeResult, queueReferences);

    // Assert: without a DLQ configured, nothing gets republished.
    await publisher.DidNotReceiveWithAnyArgs()
        .PublishAsync(Arg.Any<IMessage>(), Arg.Any<string>(), null, Arg.Any<CancellationToken>());
}
public async Task StartAsync_should_republish_to_deadletter_when_exception_occurs()
{
    // Arrange
    var consumeResult = new ConsumeResult<Guid, byte[]>();
    var parsedMessage = NSubstitute.Substitute.For<IMessage>();
    var queueReferences = new QueueReferences("lorem", "ipsum");

    var parser = NSubstitute.Substitute.For<IMessageParser>();
    parser.Parse(consumeResult).Returns(parsedMessage);

    // The handler is expected to copy the exception message into an error header.
    var thrown = new Exception("whoops");
    var expectedErrorHeader = new Header(HeaderNames.Error, Encoding.UTF8.GetBytes(thrown.Message));

    var processor = NSubstitute.Substitute.For<IMessageProcessor>();
    processor.WhenForAnyArgs(mp => mp.ProcessAsync((dynamic)parsedMessage))
        .Throw(thrown);

    var publisher = NSubstitute.Substitute.For<IKafkaPublisherExecutor>();
    var logger = NSubstitute.Substitute.For<ILogger<KafkaMessageHandler>>();
    var systemInfo = SystemInfo.New();

    var sut = new KafkaMessageHandler(parser, processor, publisher, logger, systemInfo);

    // Act
    await sut.HandleAsync(consumeResult, queueReferences);

    // Assert: the message landed on the dead-letter topic, carrying the error header key.
    await publisher.Received().PublishAsync(parsedMessage,
        queueReferences.DeadLetterTopicName,
        Arg.Is((Header[] headers) => headers.Any(h => h.Key == expectedErrorHeader.Key)),
        Arg.Any<CancellationToken>());
}
public void Create_should_return_valid_context()
{
    // Arrange: the factory resolves queue references, the pool hands out the context.
    var channel = NSubstitute.Substitute.For<IModel>();
    var queueReferences = new QueueReferences("exchange", "queue", "routingKey", "deadletterExch", "deadLetterQ");

    var contextPool = NSubstitute.Substitute.For<IPublisherChannelContextPool>();
    var expectedContext = new PublisherChannelContext(channel, queueReferences, contextPool);
    contextPool.Get(queueReferences).Returns(expectedContext);

    var referenceFactory = NSubstitute.Substitute.For<IQueueReferenceFactory>();
    referenceFactory.Create((IMessage)null).ReturnsForAnyArgs(queueReferences);

    var sut = new PublisherChannelFactory(contextPool, referenceFactory);

    // Act
    var result = sut.Create(DummyMessage.New());

    // Assert
    result.Should().NotBeNull();
    result.Channel.Should().Be(channel);
    result.QueueReferences.Should().Be(queueReferences);
}
// Builds a KafkaSubscriber wired with substitutes; a partial substitute of the
// consumer builder lets us stub Build() to hand back the provided consumer.
private static KafkaSubscriber<IMessage> BuildSUT(
    QueueReferences queueRefs,
    IConsumer<Guid, byte[]> consumer,
    IKafkaMessageHandler messageHandler = null)
{
    var consumerConfig = new ConsumerConfig() { GroupId = "group id" };

    var consumerBuilder = NSubstitute.Substitute.ForPartsOf<ConsumerBuilder<Guid, byte[]>>(consumerConfig);
    consumerBuilder.When(b => b.Build()).DoNotCallBase();
    consumerBuilder.Build().Returns(consumer);

    var builderFactory = NSubstitute.Substitute.For<IConsumerBuilderFactory>();
    builderFactory.Create<IMessage, Guid, byte[]>().Returns(consumerBuilder);

    // Default to a no-op handler unless the test supplies its own.
    messageHandler ??= NSubstitute.Substitute.For<IKafkaMessageHandler>();

    var queueReferenceFactory = NSubstitute.Substitute.For<IQueueReferenceFactory>();
    queueReferenceFactory.Create<IMessage>().ReturnsForAnyArgs(queueRefs);

    var logger = NSubstitute.Substitute.For<ILogger<KafkaSubscriber<IMessage>>>();

    return new KafkaSubscriber<IMessage>(builderFactory, queueReferenceFactory, messageHandler, logger);
}
/// <summary>
/// Binds a channel to its queue references and to the pool it should be returned to.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when any argument is null.</exception>
public PublisherChannelContext(IModel channel, QueueReferences queueReferences, IPublisherChannelContextPool publisherChannelContextPool)
{
    if (channel is null)
        throw new ArgumentNullException(nameof(channel));
    if (queueReferences is null)
        throw new ArgumentNullException(nameof(queueReferences));
    if (publisherChannelContextPool is null)
        throw new ArgumentNullException(nameof(publisherChannelContextPool));

    Channel = channel;
    QueueReferences = queueReferences;
    _publisherChannelContextPool = publisherChannelContextPool;
}
/// <summary>
/// Parses the raw consume result and, when a message comes out of it, runs it
/// through the processing pipeline. A null parse result is skipped silently.
/// </summary>
public async Task HandleAsync(ConsumeResult<Guid, byte[]> result, QueueReferences queueReferences, CancellationToken cancellationToken = default)
{
    var message = Parse(result);
    if (message is null)
        return;

    await Process(message, queueReferences, cancellationToken);
}
public void ctor_should_throw_if_arguments_null()
{
    // Arrange: valid values for all three dependencies.
    var contextPool = NSubstitute.Substitute.For<IPublisherChannelContextPool>();
    var channel = NSubstitute.Substitute.For<IModel>();
    var queueReferences = new QueueReferences("exchange", "queue", "routingKey", "deadletterExch", "deadLetterQ");

    // Act & Assert: nulling out any single argument must throw.
    Assert.Throws<ArgumentNullException>(() => new PublisherChannelContext(null, queueReferences, contextPool));
    Assert.Throws<ArgumentNullException>(() => new PublisherChannelContext(channel, null, contextPool));
    Assert.Throws<ArgumentNullException>(() => new PublisherChannelContext(channel, queueReferences, null));
}
// Central error path for failed message processing: log the failure, then
// hand the message off to the dead-letter queue.
private async Task HandleProcessErrors(IMessage message, QueueReferences queueReferences, Exception ex, CancellationToken cancellationToken)
{
    _logger.LogWarning(ex, "an exception has occurred while consuming message '{MessageId}': {Exception}",
        message.Id, ex.Message);

    // TODO: consider adding retry policy, maybe using message headers to store the retry count
    //await RePublishAsync(message, queueReferences, cancellationToken);

    await PublishToDLQAsync(message, queueReferences, ex, cancellationToken);
}
public async Task StartAsync_should_consume_incoming_messages()
{
    // Arrange
    var queueReferences = new QueueReferences("lorem", "ipsum");
    var consumer = NSubstitute.Substitute.For<IConsumer<Guid, byte[]>>();
    var sut = BuildSUT(queueReferences, consumer);

    // Act: let the subscriber loop run briefly, then cancel.
    using var cancellation = new CancellationTokenSource(1000);
    await sut.StartAsync(cancellation.Token);

    // Assert: the underlying consumer was polled at least once.
    consumer.ReceivedWithAnyArgs().Consume(Arg.Any<CancellationToken>());
}
public async Task StartAsync_should_subscribe_to_topic()
{
    // Arrange
    var queueReferences = new QueueReferences("lorem", "ipsum");
    var consumer = NSubstitute.Substitute.For<IConsumer<Guid, byte[]>>();
    var sut = BuildSUT(queueReferences, consumer);

    // Act: run until the 1s cancellation fires.
    using var cancellation = new CancellationTokenSource(1000);
    await sut.StartAsync(cancellation.Token);

    // Assert: exactly one subscription to the resolved topic.
    consumer.Received(1).Subscribe(queueReferences.TopicName);
}
public void Dispose_should_return_to_pool()
{
    // Arrange
    var contextPool = NSubstitute.Substitute.For<IPublisherChannelContextPool>();
    var channel = NSubstitute.Substitute.For<IModel>();
    var queueReferences = new QueueReferences("exchange", "queue", "routingKey", "deadletterExch", "deadLetterQ");
    var sut = new PublisherChannelContext(channel, queueReferences, contextPool);

    // Act: disposing a context should hand it back to the pool, not destroy it.
    sut.Dispose();

    // Assert
    contextPool.Received(1).Return(sut);
}
// Dispatches a parsed message to the processor; any processing failure is
// routed through the error-handling path instead of bubbling up.
private async Task Process(IMessage message, QueueReferences queueReferences, CancellationToken cancellationToken)
{
    _logger.LogInformation("client {ClientGroup}/{ClientId} received message '{MessageId}' from Topic '{Topic}'. Processing...",
        _systemInfo.ClientGroup, _systemInfo.ClientId, message.Id, queueReferences.TopicName);

    try
    {
        // dynamic dispatch selects the ProcessAsync overload for the runtime message type
        await _messageProcessor.ProcessAsync((dynamic)message, cancellationToken);
    }
    catch (Exception ex)
    {
        await HandleProcessErrors(message, queueReferences, ex, cancellationToken);
    }
}
/// <summary>
/// Wires the subscriber to its bus connection and processing pipeline, and resolves
/// the queue references for <typeparamref name="TM"/> up-front.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
public RabbitSubscriber(IBusConnection connection, IQueueReferenceFactory queueReferenceFactory, IMessageParser messageParser, IMessageProcessor messageProcessor, ILogger<RabbitSubscriber<TM>> logger)
{
    // Validation order preserved: the factory is checked first because it is
    // consumed immediately below rather than stored.
    if (queueReferenceFactory == null)
        throw new ArgumentNullException(nameof(queueReferenceFactory));
    if (connection == null)
        throw new ArgumentNullException(nameof(connection));
    if (logger == null)
        throw new ArgumentNullException(nameof(logger));
    if (messageProcessor == null)
        throw new ArgumentNullException(nameof(messageProcessor));
    if (messageParser == null)
        throw new ArgumentNullException(nameof(messageParser));

    _connection = connection;
    _logger = logger;
    _messageProcessor = messageProcessor;
    _messageParser = messageParser;
    _queueReferences = queueReferenceFactory.Create<TM>();
}
public async Task StartAsync_should_process_incoming_messages()
{
    // Arrange: the consumer yields one fixed result; the handler is observed.
    var queueReferences = new QueueReferences("lorem", "ipsum");
    var consumeResult = new ConsumeResult<Guid, byte[]>();

    var consumer = NSubstitute.Substitute.For<IConsumer<Guid, byte[]>>();
    consumer.Consume(Arg.Any<CancellationToken>()).ReturnsForAnyArgs(consumeResult);

    var handler = NSubstitute.Substitute.For<IKafkaMessageHandler>();
    var sut = BuildSUT(queueReferences, consumer, handler);

    // Act
    using var cancellation = new CancellationTokenSource(1000);
    await sut.StartAsync(cancellation.Token);

    // Assert: each consumed result was forwarded to the handler.
    await handler.Received().HandleAsync(consumeResult, queueReferences, Arg.Any<CancellationToken>());
}
// Best-effort republish of a message onto its original topic; publish
// failures are logged as errors and swallowed.
private async Task RePublishAsync(IMessage message, QueueReferences queueReferences, CancellationToken cancellationToken)
{
    try
    {
        _logger.LogWarning("republishing message '{MessageId}' to topic '{Topic}' ...",
            message.Id, queueReferences.TopicName);

        await _publisher.PublishAsync(message,
            queueReferences.TopicName,
            cancellationToken: cancellationToken);
    }
    catch (Exception ex)
    {
        // Swallow deliberately: a failed republish must not crash the consumer loop.
        _logger.LogError(ex, "an exception has occurred while publishing message '{MessageId}' to topic '{Topic}': {Exception}",
            message.Id, queueReferences.TopicName, ex.Message);
    }
}
public void Dispose_should_dispose_channels()
{
    // Arrange: put one open channel back into the pool.
    var pooledChannel = NSubstitute.Substitute.For<IModel>();
    var queueReferences = new QueueReferences("lorem", "ipsum", "dolor", "amet", "sic");
    var busConnection = NSubstitute.Substitute.For<IBusConnection>();

    var sut = new PublisherChannelContextPool(busConnection);
    var context = new PublisherChannelContext(pooledChannel, queueReferences, sut);
    sut.Return(context);

    // Act
    sut.Dispose();

    // Assert: disposing the pool disposes every pooled channel.
    pooledChannel.Received(1).Dispose();
}
public void Create_should_return_valid_context()
{
    // Arrange: the factory resolves references; the connection supplies the channel.
    var busConnection = NSubstitute.Substitute.For<IBusConnection>();
    var referenceFactory = NSubstitute.Substitute.For<IQueueReferenceFactory>();
    var queueReferences = new QueueReferences("exchange", "queue", "deadletterExch", "deadLetterQ");
    referenceFactory.Create(null).ReturnsForAnyArgs(queueReferences);

    var sut = new PublisherChannelFactory(busConnection, referenceFactory);

    // Act
    var result = sut.Create(DummyMessage.New());

    // Assert
    result.Should().NotBeNull();
    result.Channel.Should().NotBeNull();
    result.QueueReferences.Should().Be(queueReferences);
}
public async Task StartAsync_should_parse_incoming_messages()
{
    // Arrange
    var parser = NSubstitute.Substitute.For<IMessageParser>();
    var processor = NSubstitute.Substitute.For<IMessageProcessor>();
    var publisher = NSubstitute.Substitute.For<IKafkaPublisherExecutor>();
    var logger = NSubstitute.Substitute.For<ILogger<KafkaMessageHandler>>();
    var systemInfo = SystemInfo.New();
    var queueReferences = new QueueReferences("lorem", "ipsum");
    var consumeResult = new ConsumeResult<Guid, byte[]>();

    var sut = new KafkaMessageHandler(parser, processor, publisher, logger, systemInfo);

    // Act
    await sut.HandleAsync(consumeResult, queueReferences);

    // Assert: the raw consume result went through the parser.
    parser.Received().Parse(consumeResult);
}
public void Get_should_return_valid_channel()
{
    // Arrange
    var expectedChannel = NSubstitute.Substitute.For<IModel>();
    var busConnection = NSubstitute.Substitute.For<IBusConnection>();
    busConnection.CreateChannel().Returns(expectedChannel);

    var sut = new PublisherChannelContextPool(busConnection);
    var queueReferences = new QueueReferences("lorem", "ipsum", "dolor", "amet", "sic");

    // Act
    var result = sut.Get(queueReferences);

    // Assert: the context wraps the connection's channel and the requested references,
    // and the topic exchange was declared on the channel.
    result.Should().NotBeNull();
    result.QueueReferences.Should().Be(queueReferences);
    result.Channel.Should().Be(expectedChannel);

    expectedChannel.Received(1).ExchangeDeclare(queueReferences.ExchangeName, type: ExchangeType.Topic);
}
public async Task PublishAsync_should_throw_when_publish_fails()
{
    // Arrange: the executor reports the message as NOT persisted.
    var message = DummyMessage.New();
    var queueReferences = new QueueReferences("lorem", "ipsum");

    var executor = NSubstitute.Substitute.For<IKafkaPublisherExecutor>();
    var failedReport = new DeliveryReport<Guid, byte[]>()
    {
        Status = PersistenceStatus.NotPersisted
    };
    executor.PublishAsync(message, queueReferences.TopicName, null, Arg.Any<CancellationToken>())
        .Returns(failedReport);

    var referenceFactory = NSubstitute.Substitute.For<IQueueReferenceFactory>();
    referenceFactory.Create(message).ReturnsForAnyArgs(queueReferences);

    var sut = new KafkaPublisher(executor, referenceFactory);

    // Act & Assert: a non-persisted delivery must surface as an exception.
    await Assert.ThrowsAsync<InvalidOperationException>(async () => await sut.PublishAsync(message));
}
public void Create_should_recreate_sender_when_null()
{
    // Arrange: the client always hands back a null sender, forcing the factory to retry.
    var serviceBusClient = NSubstitute.Substitute.ForPartsOf<ServiceBusClient>();
    serviceBusClient.WhenForAnyArgs(c => c.CreateSender(Arg.Any<string>())).DoNotCallBase();
    serviceBusClient.CreateSender(Arg.Any<string>()).ReturnsNullForAnyArgs();

    var referenceFactory = NSubstitute.Substitute.For<IQueueReferenceFactory>();
    var queueReferences = new QueueReferences("lorem", "ipsum");
    referenceFactory.Create<DummyMessage>().Returns(queueReferences);

    var sut = new ServiceBusSenderFactory(referenceFactory, serviceBusClient);

    // Act
    sut.Create<DummyMessage>();

    // Assert: a null cached sender triggers a second CreateSender call.
    serviceBusClient.Received(2).CreateSender(queueReferences.TopicName);
}
public async Task StartAsync_should_process_incoming_messages()
{
    // Arrange
    var consumeResult = new ConsumeResult<Guid, byte[]>();
    var parsedMessage = NSubstitute.Substitute.For<IMessage>();
    var queueReferences = new QueueReferences("lorem", "ipsum");

    var parser = NSubstitute.Substitute.For<IMessageParser>();
    parser.Parse(consumeResult).Returns(parsedMessage);

    var processor = NSubstitute.Substitute.For<IMessageProcessor>();
    var publisher = NSubstitute.Substitute.For<IKafkaPublisherExecutor>();
    var logger = NSubstitute.Substitute.For<ILogger<KafkaMessageHandler>>();
    var systemInfo = SystemInfo.New();

    var sut = new KafkaMessageHandler(parser, processor, publisher, logger, systemInfo);

    // Act
    await sut.HandleAsync(consumeResult, queueReferences);

    // Assert: the parsed message was dispatched to the processor.
    await processor.Received().ProcessAsync((dynamic)parsedMessage, Arg.Any<CancellationToken>());
}
public void Create_should_return_Processor()
{
    // Arrange: stub the client so CreateProcessor yields our substitute processor.
    var serviceBusClient = NSubstitute.Substitute.ForPartsOf<ServiceBusClient>();
    var expectedProcessor = NSubstitute.Substitute.ForPartsOf<ServiceBusProcessor>();
    serviceBusClient.WhenForAnyArgs(c => c.CreateProcessor(Arg.Any<string>(), Arg.Any<string>())).DoNotCallBase();
    serviceBusClient.CreateProcessor(Arg.Any<string>(), Arg.Any<string>()).ReturnsForAnyArgs(expectedProcessor);

    var referenceFactory = NSubstitute.Substitute.For<IQueueReferenceFactory>();
    var queueReferences = new QueueReferences("lorem", "ipsum");
    referenceFactory.Create<DummyMessage>().Returns(queueReferences);

    var sut = new ServiceBusProcessorFactory(referenceFactory, serviceBusClient);

    // Act
    var result = sut.Create<DummyMessage>();

    // Assert: the processor comes from the client, built with the resolved topic/subscription.
    result.Should().Be(expectedProcessor);
    serviceBusClient.Received(1).CreateProcessor(queueReferences.TopicName, queueReferences.SubscriptionName);
}
public async Task PublishAsync_publish_message()
{
    // Arrange: the executor reports the message as persisted.
    var message = DummyMessage.New();
    var queueReferences = new QueueReferences("lorem", "ipsum");

    var executor = NSubstitute.Substitute.For<IKafkaPublisherExecutor>();
    var successReport = new DeliveryReport<Guid, byte[]>()
    {
        Status = PersistenceStatus.Persisted
    };
    executor.PublishAsync(message, queueReferences.TopicName, null, Arg.Any<CancellationToken>())
        .Returns(successReport);

    var referenceFactory = NSubstitute.Substitute.For<IQueueReferenceFactory>();
    referenceFactory.Create(message).ReturnsForAnyArgs(queueReferences);

    var sut = new KafkaPublisher(executor, referenceFactory);

    // Act
    await sut.PublishAsync(message);

    // Assert: exactly one publish to the resolved topic.
    await executor.Received(1).PublishAsync(message, queueReferences.TopicName);
}
// Fixed typo in the test name: "hanle" -> "handle". Test methods are discovered
// by the framework via reflection, so the rename is safe for callers.
public async Task StartAsync_should_handle_null_messages()
{
    // Arrange: the parser returns null, simulating an unparseable payload.
    var consumeResult = new ConsumeResult<Guid, byte[]>();
    var queueRefs = new QueueReferences("lorem", "ipsum");

    var parser = NSubstitute.Substitute.For<IMessageParser>();
    parser.Parse(consumeResult).ReturnsNull();

    var messageProcessor = NSubstitute.Substitute.For<IMessageProcessor>();
    var publisher = NSubstitute.Substitute.For<IKafkaPublisherExecutor>();
    var logger = NSubstitute.Substitute.For<ILogger<KafkaMessageHandler>>();
    var sysInfo = SystemInfo.New();

    var sut = new KafkaMessageHandler(parser, messageProcessor, publisher, logger, sysInfo);

    // Act
    await sut.HandleAsync(consumeResult, queueRefs);

    // Assert: null messages are skipped, never forwarded to the processor.
    await messageProcessor.DidNotReceiveWithAnyArgs()
        .ProcessAsync(Arg.Any<IMessage>(), Arg.Any<CancellationToken>());
}
/// <summary>
/// Builds the Kafka consumer eagerly from the injected builder factory and resolves
/// the queue references for <typeparamref name="TM"/> once, at construction time.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a required dependency is null.</exception>
public KafkaSubscriber(IConsumerBuilderFactory builderFactory, IQueueReferenceFactory queueReferenceFactory, IKafkaMessageHandler messageHandler, ILogger<KafkaSubscriber<TM>> logger, KafkaSubscriberConfig config = null)
{
    // These two are consumed immediately rather than stored, so they are guarded first.
    if (builderFactory is null)
        throw new ArgumentNullException(nameof(builderFactory));
    if (queueReferenceFactory is null)
        throw new ArgumentNullException(nameof(queueReferenceFactory));

    _consumer = builderFactory.Create<TM, Guid, byte[]>().Build();
    _queueReferences = queueReferenceFactory.Create<TM>();

    _messageHandler = messageHandler ?? throw new ArgumentNullException(nameof(messageHandler));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));

    // Fall back to the library defaults when no explicit config is supplied.
    _config = config ?? KafkaSubscriberConfig.Default;
}