public async Task ChunkingWithBatchConsuming_CorrectlyConsumedInBatchAndAggregated()
{
    var message1 = new TestEventOne { Content = "Hello E2E!" };
    var message2 = new TestEventOne { Content = "Hello E2E!" };

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Chunk = new ChunkSettings { Size = 10 }
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Batch = new BatchSettings { Size = 2 }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    await publisher.PublishAsync(message1);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    // The ~24 byte JSON payload is split into 3 chunks of up to 10 bytes each, but
    // nothing is pushed to the subscribers until the batch (size 2) is full.
    SpyBehavior.OutboundEnvelopes.Should().HaveCount(3);
    SpyBehavior.InboundEnvelopes.Should().BeEmpty();

    await publisher.PublishAsync(message2);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.Should().HaveCount(6);
    SpyBehavior.OutboundEnvelopes.ForEach(
        envelope =>
        {
            envelope.RawMessage.Should().NotBeNull();
            envelope.RawMessage!.Length.Should().BeLessOrEqualTo(10);
        });
    SpyBehavior.InboundEnvelopes.Should().HaveCount(2);
}
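// Verifies that chunking and encryption compose: the message is encrypted, split into
// 10-byte chunks (5 of them, since the ciphertext is larger than the cleartext), then
// reassembled and decrypted on the inbound side.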
public async Task EncryptionAndChunking_EncryptedAndChunkedThenAggregatedAndDecrypted()
{
    var message = new TestEventOne { Content = "Hello E2E!" };
    var rawMessage = await Endpoint.DefaultSerializer.SerializeAsync(
                         message,
                         new MessageHeaderCollection(),
                         MessageSerializationContext.Empty)
                     ?? throw new InvalidOperationException("Serializer returned null");

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Chunk = new ChunkSettings { Size = 10 },
                                Encryption = new SymmetricEncryptionSettings { Key = AesEncryptionKey }
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Encryption = new SymmetricEncryptionSettings { Key = AesEncryptionKey }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(message);

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.Should().HaveCount(5);
    SpyBehavior.OutboundEnvelopes[0].RawMessage.Should().NotBeEquivalentTo(rawMessage.Read(10));
    SpyBehavior.OutboundEnvelopes.ForEach(
        envelope =>
        {
            envelope.RawMessage.Should().NotBeNull();
            envelope.RawMessage!.Length.Should().BeLessOrEqualTo(10);
        });
    SpyBehavior.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes[0].Message.Should().BeEquivalentTo(message);
}
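// A Retry(10) + Skip() chain on a chunk sequence that always fails: 1 initial attempt
// + 10 retries = 11 tries, after which the message is skipped and the offsets of all
// 3 chunks are committed.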
public async Task RetryAndSkipPolicies_JsonChunkSequenceStillFailingAfterRetries_OffsetCommitted()
{
    var tryCount = 0;

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Chunk = new ChunkSettings { Size = 10 }
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                },
                                ErrorPolicy = ErrorPolicy.Chain(
                                    ErrorPolicy.Retry().MaxFailedAttempts(10),
                                    ErrorPolicy.Skip())
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _, IServiceProvider sp) =>
                    {
                        var logger = sp.GetRequiredService<ISilverbackLogger<ErrorHandlingTests>>();
                        tryCount++;
                        logger.LogInformation($"Handling message ({tryCount})...");
                        throw new InvalidOperationException("Retry!");
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne { Content = "Hello E2E!" });
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    tryCount.Should().Be(11);
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
}
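// Outbox produce strategy: publishing only stages the messages in the outbox database
// table; a background worker (polling every 100 ms) actually produces them to Kafka.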
public async Task OutboxProduceStrategy_DefaultSettings_ProducedAndConsumed()
{
    var serviceProvider = Host
        .WithTestDbContext()
        .ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .UseDbContext<TestDbContext>()
                .WithConnectionToMessageBroker(
                    options => options
                        .AddMockedKafka()
                        .AddOutboxDatabaseTable()
                        .AddOutboxWorker(TimeSpan.FromMilliseconds(100)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Strategy = ProduceStrategy.Outbox()
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    var dbContext = serviceProvider.GetRequiredService<TestDbContext>();

    for (int i = 1; i <= 15; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    // Nothing is produced until SaveChangesAsync commits the outbox rows.
    await dbContext.SaveChangesAsync();

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.OutboundEnvelopes.Should().HaveCount(15);
    Subscriber.InboundEnvelopes.Should().HaveCount(15);
    SpyBehavior.OutboundEnvelopes.Should().HaveCount(15);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(15);
    SpyBehavior.InboundEnvelopes
        .Select(envelope => ((TestEventOne)envelope.Message!).Content)
        .Should().BeEquivalentTo(Enumerable.Range(1, 15).Select(i => $"{i}"));
}
public async Task Rebalance_DefaultSettings_ProducedAndConsumedAfterRebalance()
{
    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 5; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.OutboundEnvelopes.Should().HaveCount(5);
    Subscriber.InboundEnvelopes.Should().HaveCount(5);

    DefaultTopic.Rebalance();

    for (int i = 1; i <= 5; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.OutboundEnvelopes.Should().HaveCount(10);
    Subscriber.InboundEnvelopes.Should().HaveCount(10);
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);
}
public async Task OutboundAndInbound_MultipleTopicsWithSingleConsumer_ProducedAndConsumed()
{
    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint("topic1"))
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint("topic2"))
                        .AddInbound(
                            new KafkaConsumerEndpoint("topic1", "topic2")
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 5; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.OutboundEnvelopes.Should().HaveCount(10);
    Subscriber.InboundEnvelopes.Should().HaveCount(10);
    SpyBehavior.OutboundEnvelopes.Should().HaveCount(10);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(10);

    var receivedContentsTopic1 = SpyBehavior.InboundEnvelopes
        .Where(envelope => envelope.ActualEndpointName == "topic1")
        .Select(envelope => ((TestEventOne)envelope.Message!).Content);
    var receivedContentsTopic2 = SpyBehavior.InboundEnvelopes
        .Where(envelope => envelope.ActualEndpointName == "topic2")
        .Select(envelope => ((TestEventOne)envelope.Message!).Content);

    var expectedMessages = Enumerable.Range(1, 5).Select(i => $"{i}").ToList();

    receivedContentsTopic1.Should().BeEquivalentTo(expectedMessages);
    receivedContentsTopic2.Should().BeEquivalentTo(expectedMessages);
}
public async Task Streaming_DisconnectWhileObserving_ObserverCompleted()
{
    bool completed = false;
    var receivedMessages = new ConcurrentBag<TestEventOne>();

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .AsObservable()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddDelegateSubscriber(
                    (IMessageStreamObservable<TestEventOne> observable) =>
                        observable.Subscribe(
                            message => receivedMessages.Add(message),
                            () => completed = true)))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne { Content = "Message 1" });
    await publisher.PublishAsync(new TestEventOne { Content = "Message 2" });

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();
    await AsyncTestingUtil.WaitAsync(() => receivedMessages.Count >= 2);

    receivedMessages.Should().HaveCount(2);

    await Broker.DisconnectAsync();

    completed.Should().BeTrue();
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(2);
}
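// Two consumers sharing the same consumer group: each of the 10 messages is consumed
// exactly once overall (hence the Distinct() over the received contents) and all 10
// offsets end up committed for the group.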
public async Task OutboundAndInbound_MultipleConsumersSameConsumerGroup_ProducedAndConsumed()
{
    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 10; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.OutboundEnvelopes.Should().HaveCount(10);
    Subscriber.InboundEnvelopes.Should().HaveCount(10);
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);
    SpyBehavior.OutboundEnvelopes.Should().HaveCount(10);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(10);
    SpyBehavior.InboundEnvelopes
        .Select(envelope => ((TestEventOne)envelope.Message!).Content)
        .Distinct()
        .Should().BeEquivalentTo(Enumerable.Range(1, 10).Select(i => $"{i}"));
}
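// With ProcessPartitionsIndependently = false the messages from all 3 partitions are
// pushed into a single shared stream instead of one stream per partition.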
public async Task Streaming_NotProcessingPartitionsIndependently_PublishedSingleStream()
{
    var receivedMessages = new ConcurrentBag<TestEventOne>();
    var receivedStreams = new ConcurrentBag<IMessageStreamEnumerable<TestEventOne>>();

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(3)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                },
                                ProcessPartitionsIndependently = false
                            }))
                .AddDelegateSubscriber(
                    async (IMessageStreamEnumerable<TestEventOne> eventsStream) =>
                    {
                        receivedStreams.Add(eventsStream);

                        await foreach (var message in eventsStream)
                        {
                            receivedMessages.Add(message);
                        }
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 15; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    receivedStreams.Should().HaveCount(1);
    receivedMessages.Should().HaveCount(15);
    receivedMessages.Select(message => message.Content)
        .Should().BeEquivalentTo(Enumerable.Range(1, 15).Select(i => $"{i}"));
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
}
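// CommitOffsetEach = 1 means every offset is committed before the next message is
// pushed to the observer, which is what the assertion inside Subscribe verifies.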
public async Task Streaming_UnboundedObservable_MessagesReceived()
{
    var receivedMessages = new List<TestEventOne>();

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .AsObservable()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                }
                            }))
                .AddDelegateSubscriber(
                    (IMessageStreamObservable<TestEventOne> observable) => observable.Subscribe(
                        message =>
                        {
                            DefaultTopic.GetCommittedOffsetsCount("consumer1")
                                .Should().Be(receivedMessages.Count);
                            receivedMessages.Add(message);
                        })))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 3; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    receivedMessages.Should().HaveCount(3);
    receivedMessages.Select(message => message.Content)
        .Should().BeEquivalentTo(Enumerable.Range(1, 3).Select(i => $"{i}"));
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
}
public async Task Inbound_WithHardcodedMessageType_MessageTypeHeaderIgnored()
{
    var message = new TestEventOne { Content = "Hello E2E!" };
    byte[] rawMessage = (await Endpoint.DefaultSerializer.SerializeAsync(
                            message,
                            new MessageHeaderCollection(),
                            MessageSerializationContext.Empty)).ReadAll()
                        ?? throw new InvalidOperationException("Serializer returned null");

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                },
                                Serializer = new JsonMessageSerializer<TestEventOne>()
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var broker = serviceProvider.GetRequiredService<IBroker>();
    var producer = broker.GetProducer(new KafkaProducerEndpoint(DefaultTopicName));
    await producer.RawProduceAsync(
        rawMessage,
        new MessageHeaderCollection
        {
            { DefaultMessageHeaders.MessageType, "Silverback.Bad.TestEventOne, Silverback.Bad" }
        });

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes[0].Message.Should().BeOfType<TestEventOne>();
}
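// Retention in the mocked broker: after 110 messages the topic appears to retain only
// the last 100 per partition, so the first offset moves from 0 to 10.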
public async Task Retention_ProducedLotOfMessages_FirstMessagesRemoved()
{
    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 100; i++)
    {
        await publisher.PublishAsync(new TestEventOne());
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.InboundEnvelopes.Should().HaveCount(100);
    DefaultTopic.GetFirstOffset(new Partition(0)).Should().Be(new Offset(0));
    DefaultTopic.GetLastOffset(new Partition(0)).Should().Be(new Offset(99));

    for (int i = 1; i <= 10; i++)
    {
        await publisher.PublishAsync(new TestEventOne());
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.InboundEnvelopes.Should().HaveCount(110);
    DefaultTopic.GetFirstOffset(new Partition(0)).Should().Be(new Offset(10));
    DefaultTopic.GetLastOffset(new Partition(0)).Should().Be(new Offset(109));
}
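// With ThrowIfUnhandled = true an unhandled message (TestEventTwo here) faults the
// pipeline and causes the consumer to disconnect.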
public async Task Inbound_ThrowIfUnhandled_ConsumerStoppedIfMessageIsNotHandled()
{
    var received = 0;

    Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration =
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                },
                                ThrowIfUnhandled = true
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddDelegateSubscriber((TestEventOne _) => received++))
        .Run();

    var publisher = Host.ScopedServiceProvider.GetRequiredService<IEventPublisher>();

    await publisher.PublishAsync(new TestEventOne { Content = "Handled message" });
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();
    received.Should().Be(1);

    await publisher.PublishAsync(new TestEventTwo { Content = "Unhandled message" });
    await AsyncTestingUtil.WaitAsync(() => Broker.Consumers[0].IsConnected == false);
    Broker.Consumers[0].IsConnected.Should().BeFalse();
}
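// The subscriber succeeds on the 3rd attempt, well within MaxFailedAttempts(10),
// so the offset gets committed normally.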
public async Task RetryPolicy_SuccessfulAfterSomeTries_OffsetCommitted()
{
    var tryCount = 0;

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                },
                                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _) =>
                    {
                        tryCount++;

                        if (tryCount != 3)
                        {
                            throw new InvalidOperationException("Retry!");
                        }
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne { Content = "Hello E2E!" });
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    tryCount.Should().Be(3);
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(1);
}
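// Runs with both commit modes: either auto-commit every 50 ms or a manual commit every
// 3 offsets (CommitOffsetEach = -1 disables the manual mode); both end up with all
// 3 offsets committed.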
public async Task Inbound_WithAndWithoutAutoCommit_OffsetCommitted(bool enableAutoCommit)
{
    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = enableAutoCommit,
                                    AutoCommitIntervalMs = 50,
                                    CommitOffsetEach = enableAutoCommit ? -1 : 3
                                }
                            }))
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne { Content = "one" });
    await publisher.PublishAsync(new TestEventOne { Content = "two" });
    await publisher.PublishAsync(new TestEventOne { Content = "three" });
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
}
public async Task RawProducer_ByteArray_ProducedAndConsumed()
{
    var message = new TestEventOne { Content = "Hello E2E!" };
    var headers = new MessageHeaderCollection();
    byte[] rawMessage = (await Endpoint.DefaultSerializer.SerializeAsync(
                            message,
                            headers,
                            MessageSerializationContext.Empty)).ReadAll()
                        ?? throw new InvalidOperationException("Serializer returned null");

    Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var producer = Broker.GetProducer(new KafkaProducerEndpoint(DefaultTopicName));
    await producer.ProduceAsync(rawMessage, headers);

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes[0].Message.Should().BeOfType<TestEventOne>();
    SpyBehavior.InboundEnvelopes[0].Message.As<TestEventOne>().Content.Should().BeEquivalentTo("Hello E2E!");
}
public async Task ChunkingWithRemappedHeaderNames_HeadersRemappedAndMessageReceived()
{
    var message = new TestEventOne { Content = "Hello E2E!" };

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Chunk = new ChunkSettings { Size = 10 }
                            })
                        .AddInbound(new KafkaConsumerEndpoint(DefaultTopicName)))
                .WithCustomHeaderName(DefaultMessageHeaders.ChunkIndex, "x-ch-id")
                .WithCustomHeaderName(DefaultMessageHeaders.ChunksCount, "x-ch-cnt")
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(message);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.ForEach(
        envelope =>
        {
            envelope.Headers.GetValue("x-ch-id").Should().NotBeNullOrEmpty();
            envelope.Headers.GetValue("x-ch-cnt").Should().NotBeNullOrEmpty();
            envelope.Headers.GetValue(DefaultMessageHeaders.ChunkIndex).Should().BeNull();
            envelope.Headers.GetValue(DefaultMessageHeaders.ChunksCount).Should().BeNull();
        });
    SpyBehavior.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes[0].Message.Should().BeEquivalentTo(message);
}
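// MaxFailedAttempts(10) allows 1 initial attempt + 10 retries = 11 tries in total,
// and the spy records one inbound envelope per attempt.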
public async Task RetryPolicy_ProcessingRetriedMultipleTimes()
{
    var message = new TestEventOne { Content = "Hello E2E!" };
    var tryCount = 0;

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                },
                                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _) =>
                    {
                        tryCount++;
                        throw new InvalidOperationException("Retry!");
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(message);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.Should().HaveCount(1);
    tryCount.Should().Be(11);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(11);
    SpyBehavior.InboundEnvelopes.ForEach(envelope => envelope.Message.Should().BeEquivalentTo(message));
}
public async Task BatchConsuming_CorrectlyConsumedInBatch()
{
    var message1 = new TestEventOne { Content = "Hello E2E!" };
    var message2 = new TestEventOne { Content = "Hello E2E!" };

    Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Batch = new BatchSettings { Size = 2 }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>())
        .Run();

    var publisher = Host.ScopedServiceProvider.GetRequiredService<IEventPublisher>();

    await publisher.PublishAsync(message1);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes.Should().BeEmpty();

    await publisher.PublishAsync(message2);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.Should().HaveCount(2);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(2);
}
public async Task StatusInfo_ConsumingAndDisconnecting_StatusHistoryRecorded()
{
    Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration =
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var broker = Host.ScopedServiceProvider.GetRequiredService<IBroker>();
    broker.Consumers[0].StatusInfo.History.Should().HaveCount(1);
    broker.Consumers[0].StatusInfo.History.Last().Status.Should().Be(ConsumerStatus.Connected);
    broker.Consumers[0].StatusInfo.History.Last().Timestamp.Should().NotBeNull();

    var publisher = Host.ScopedServiceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne());
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    broker.Consumers[0].StatusInfo.History.Should().HaveCount(2);
    broker.Consumers[0].StatusInfo.History.Last().Status.Should().Be(ConsumerStatus.Consuming);
    broker.Consumers[0].StatusInfo.History.Last().Timestamp.Should().NotBeNull();

    await Broker.DisconnectAsync();

    broker.Consumers[0].StatusInfo.History.Should().HaveCount(3);
    broker.Consumers[0].StatusInfo.History.Last().Status.Should().Be(ConsumerStatus.Disconnected);
    broker.Consumers[0].StatusInfo.History.Last().Timestamp.Should().NotBeNull();
}
public async Task OutboundAndInbound_WithHardcodedMessageType_ProducedAndConsumed()
{
    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Serializer = new JsonMessageSerializer<TestEventOne>()
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                },
                                Serializer = new JsonMessageSerializer<TestEventOne>()
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne());
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.OutboundEnvelopes.Should().HaveCount(1);
    Subscriber.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.OutboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.OutboundEnvelopes[0].Headers.Should()
        .NotContain(header => header.Name == DefaultMessageHeaders.MessageType);
    SpyBehavior.InboundEnvelopes[0].Message.Should().BeOfType<TestEventOne>();
}
public async Task OutboundAndInbound_MessageWithCustomHeaders_HeadersTransferred()
{
    var message = new TestEventWithHeaders
    {
        Content = "Hello E2E!",
        CustomHeader = "Hello header!",
        CustomHeader2 = false
    };

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(message);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.InboundEnvelopes.Should().HaveCount(1);
    SpyBehavior.InboundEnvelopes[0].Message.Should().BeEquivalentTo(message);
    SpyBehavior.InboundEnvelopes[0].Headers.Should().ContainEquivalentOf(
        new MessageHeader("x-custom-header", "Hello header!"));
    SpyBehavior.InboundEnvelopes[0].Headers.Should().ContainEquivalentOf(
        new MessageHeader("x-custom-header2", "False"));
}
public async Task StatusInfo_Consuming_LatestConsumedMessageTracked()
{
    Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration =
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = Host.ScopedServiceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne());
    await publisher.PublishAsync(new TestEventOne());
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    var broker = Host.ScopedServiceProvider.GetRequiredService<IBroker>();
    broker.Consumers[0].StatusInfo.LatestConsumedMessageIdentifier.Should().BeOfType<KafkaOffset>();
    broker.Consumers[0].StatusInfo.LatestConsumedMessageIdentifier.As<KafkaOffset>().Offset.Should().Be(1);
    broker.Consumers[0].StatusInfo.LatestConsumedMessageTimestamp.Should().NotBeNull();
}
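// MaxDegreeOfParallelism = 2 caps the number of partitions processed concurrently:
// even with 5 partitions and 4 kafka keys, only 2 messages are in-flight while the
// subscriber is blocked on the TaskCompletionSource.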
public async Task Streaming_FromMultiplePartitionsWithLimitedParallelism_ConcurrencyLimited()
{
    var receivedMessages = new List<TestEventWithKafkaKey>();
    var taskCompletionSource = new TaskCompletionSource<bool>();

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(5)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                },
                                MaxDegreeOfParallelism = 2
                            }))
                .AddDelegateSubscriber(
                    async (IMessageStreamEnumerable<TestEventWithKafkaKey> eventsStream) =>
                    {
                        await foreach (var message in eventsStream)
                        {
                            lock (receivedMessages)
                            {
                                receivedMessages.Add(message);
                            }

                            await taskCompletionSource.Task;
                        }
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 3; i++)
    {
        await publisher.PublishAsync(new TestEventWithKafkaKey { KafkaKey = 1, Content = $"{i}" });
        await publisher.PublishAsync(new TestEventWithKafkaKey { KafkaKey = 2, Content = $"{i}" });
        await publisher.PublishAsync(new TestEventWithKafkaKey { KafkaKey = 3, Content = $"{i}" });
        await publisher.PublishAsync(new TestEventWithKafkaKey { KafkaKey = 4, Content = $"{i}" });
    }

    await AsyncTestingUtil.WaitAsync(() => receivedMessages.Count >= 2);
    await Task.Delay(100);

    try
    {
        receivedMessages.Should().HaveCount(2);
    }
    finally
    {
        taskCompletionSource.SetResult(true);
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    receivedMessages.Should().HaveCount(12);
}
public async Task Streaming_UnboundedObservableProcessingFailed_ConsumerStopped()
{
    var receivedMessages = new List<TestEventOne>();

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .AsObservable()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                }
                            }))
                .AddDelegateSubscriber(
                    (IMessageStreamObservable<TestEventOne> observable) => observable.Subscribe(
                        message =>
                        {
                            receivedMessages.Add(message);

                            if (receivedMessages.Count == 2)
                            {
                                throw new InvalidOperationException("Test");
                            }
                        })))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne { Content = "Message 1" });
    await publisher.PublishAsync(new TestEventOne { Content = "Message 2" });
    await publisher.PublishAsync(new TestEventOne { Content = "Message 3" });

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();
    await AsyncTestingUtil.WaitAsync(() => receivedMessages.Count >= 2);

    receivedMessages.Should().HaveCount(2);
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(1);
    Broker.Consumers[0].IsConnected.Should().BeFalse();
}
public async Task Streaming_DisconnectWhileEnumerating_EnumerationAborted()
{
    bool aborted = false;
    var receivedMessages = new List<TestEventOne>();

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddDelegateSubscriber(
                    (IMessageStreamEnumerable<TestEventOne> eventsStream) =>
                    {
                        try
                        {
                            foreach (var message in eventsStream)
                            {
                                receivedMessages.Add(message);
                            }
                        }
                        catch (OperationCanceledException)
                        {
                            aborted = true;
                        }
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne { Content = "Message 1" });
    await publisher.PublishAsync(new TestEventOne { Content = "Message 2" });

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    receivedMessages.Should().HaveCount(2);

    await Broker.DisconnectAsync();

    await AsyncTestingUtil.WaitAsync(() => aborted); // TODO: Necessary?

    aborted.Should().BeTrue();
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(2);
}
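// Batch of 10 with a 500 ms MaxWaitTime: the first batch fills up and completes
// normally, while the remaining 5 messages are flushed by the timeout.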
public async Task Batch_WithTimeout_IncompleteBatchCompletedAfterTimeout()
{
    var receivedBatches = new List<List<TestEventOne>>();
    var completedBatches = 0;

    Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration =
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                },
                                Batch = new BatchSettings
                                {
                                    Size = 10,
                                    MaxWaitTime = TimeSpan.FromMilliseconds(500)
                                }
                            }))
                .AddDelegateSubscriber(
                    async (IMessageStreamEnumerable<TestEventOne> eventsStream) =>
                    {
                        var list = new List<TestEventOne>();
                        receivedBatches.Add(list);

                        await foreach (var message in eventsStream)
                        {
                            list.Add(message);
                        }

                        completedBatches++;
                    }))
        .Run();

    var publisher = Host.ScopedServiceProvider.GetRequiredService<IEventPublisher>();

    for (int i = 1; i <= 15; i++)
    {
        await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
    }

    await AsyncTestingUtil.WaitAsync(() => receivedBatches.Sum(batch => batch.Count) == 15);

    receivedBatches.Should().HaveCount(2);
    receivedBatches[0].Should().HaveCount(10);
    receivedBatches[1].Should().HaveCount(5);
    completedBatches.Should().Be(1);
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    receivedBatches.Should().HaveCount(2);
    receivedBatches[0].Should().HaveCount(10);
    receivedBatches[1].Should().HaveCount(5);
    completedBatches.Should().Be(2);
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
}
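// Retry over an encrypted chunk sequence: the whole sequence is consumed again on each
// retry and the subscriber succeeds on the 3rd attempt, so the spy records 3 inbound
// envelopes for the single published message.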
public async Task RetryPolicy_EncryptedAndChunkedMessage_RetriedMultipleTimes()
{
    var message = new TestEventOne { Content = "Hello E2E!" };
    var rawMessage = await Endpoint.DefaultSerializer.SerializeAsync(
        message,
        new MessageHeaderCollection(),
        MessageSerializationContext.Empty);
    var tryCount = 0;

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Chunk = new ChunkSettings { Size = 10 },
                                Encryption = new SymmetricEncryptionSettings { Key = AesEncryptionKey }
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                },
                                Encryption = new SymmetricEncryptionSettings { Key = AesEncryptionKey },
                                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _) =>
                    {
                        tryCount++;

                        if (tryCount != 3)
                        {
                            throw new InvalidOperationException("Retry!");
                        }
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(message);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.Should().HaveCount(6);
    SpyBehavior.OutboundEnvelopes[0].RawMessage.ReReadAll().Should().NotBeEquivalentTo(rawMessage.Read(10));
    SpyBehavior.OutboundEnvelopes.ForEach(
        envelope =>
        {
            envelope.RawMessage.Should().NotBeNull();
            envelope.RawMessage!.Length.Should().BeLessOrEqualTo(10);
        });
    SpyBehavior.InboundEnvelopes.Should().HaveCount(3);
    SpyBehavior.InboundEnvelopes.ForEach(envelope => envelope.Message.Should().BeEquivalentTo(message));
}
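// Each 30-byte binary message is split into 3 chunks. The first message fails once
// after partially reading its stream, so it is re-consumed from the first chunk:
// 2 tries for message1 + 1 for message2 = 3 inbound envelopes, 6 committed offsets.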
public async Task RetryPolicy_BinaryFileChunkSequenceProcessedAfterSomeTries_RetriedMultipleTimesAndCommitted()
{
    var message1 = new BinaryFileMessage
    {
        Content = new MemoryStream(
            new byte[]
            {
                0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x10,
                0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x20,
                0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x30
            }),
        ContentType = "application/pdf"
    };
    var message2 = new BinaryFileMessage
    {
        Content = new MemoryStream(
            new byte[]
            {
                0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30,
                0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x40,
                0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x50
            }),
        ContentType = "text/plain"
    };

    var tryCount = 0;
    var receivedFiles = new List<byte[]?>();

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IBinaryFileMessage>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Chunk = new ChunkSettings { Size = 10 }
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                },
                                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
                            }))
                .AddDelegateSubscriber(
                    (BinaryFileMessage binaryFile) =>
                    {
                        if (binaryFile.ContentType != "text/plain")
                        {
                            tryCount++;

                            if (tryCount != 2)
                            {
                                // Read only the first chunk, then fail to force a retry
                                // of the whole sequence
                                var buffer = new byte[10];
                                binaryFile.Content!.Read(buffer, 0, 10);
                                throw new InvalidOperationException("Retry!");
                            }
                        }

                        lock (receivedFiles)
                        {
                            receivedFiles.Add(binaryFile.Content.ReadAll());
                        }
                    })
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IPublisher>();
    await publisher.PublishAsync(message1);
    await publisher.PublishAsync(message2);
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    tryCount.Should().Be(2);
    SpyBehavior.OutboundEnvelopes.Should().HaveCount(6);
    SpyBehavior.OutboundEnvelopes.ForEach(envelope => envelope.RawMessage.ReReadAll()!.Length.Should().Be(10));
    SpyBehavior.InboundEnvelopes.Should().HaveCount(3);
    SpyBehavior.InboundEnvelopes[0].Message.As<BinaryFileMessage>().ContentType.Should().Be("application/pdf");
    SpyBehavior.InboundEnvelopes[1].Message.As<BinaryFileMessage>().ContentType.Should().Be("application/pdf");
    SpyBehavior.InboundEnvelopes[2].Message.As<BinaryFileMessage>().ContentType.Should().Be("text/plain");
    receivedFiles.Should().HaveCount(2);
    receivedFiles[0].Should().BeEquivalentTo(message1.Content.ReReadAll());
    receivedFiles[1].Should().BeEquivalentTo(message2.Content.ReReadAll());
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(6);
}
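// Each of the two chunked messages fails on the first attempt and succeeds on the
// second (tryCount % 2), hence 4 tries and 4 inbound envelopes.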
public async Task RetryPolicy_JsonChunkSequenceProcessedAfterSomeTries_RetriedMultipleTimesAndCommitted()
{
    var tryCount = 0;

    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(
                            new KafkaProducerEndpoint(DefaultTopicName)
                            {
                                Chunk = new ChunkSettings { Size = 10 }
                            })
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    EnableAutoCommit = false,
                                    CommitOffsetEach = 1
                                },
                                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _) =>
                    {
                        tryCount++;

                        if (tryCount % 2 != 0)
                        {
                            throw new InvalidOperationException("Retry!");
                        }
                    }))
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();
    await publisher.PublishAsync(new TestEventOne { Content = "Long message one" });
    await publisher.PublishAsync(new TestEventOne { Content = "Long message two" });
    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    SpyBehavior.OutboundEnvelopes.Should().HaveCount(6);
    SpyBehavior.OutboundEnvelopes.ForEach(
        envelope =>
        {
            envelope.RawMessage.Should().NotBeNull();
            envelope.RawMessage!.Length.Should().BeLessOrEqualTo(10);
        });
    tryCount.Should().Be(4);
    SpyBehavior.InboundEnvelopes.Should().HaveCount(4);
    SpyBehavior.InboundEnvelopes[0].Message.As<TestEventOne>().Content.Should().Be("Long message one");
    SpyBehavior.InboundEnvelopes[1].Message.As<TestEventOne>().Content.Should().Be("Long message one");
    SpyBehavior.InboundEnvelopes[2].Message.As<TestEventOne>().Content.Should().Be("Long message two");
    SpyBehavior.InboundEnvelopes[3].Message.As<TestEventOne>().Content.Should().Be("Long message two");
    DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(6);
}