Example #1
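        // With ProcessAllPartitionsTogether() the 15 messages spread across the 3 partitions
        // are published to the subscriber as a single IMessageStreamEnumerable.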
        public async Task Streaming_NotProcessingPartitionsIndependently_PublishedSingleStream()
        {
            var receivedMessages = new ConcurrentBag <TestEventOne>();
            var receivedStreams  = new ConcurrentBag <IMessageStreamEnumerable <TestEventOne> >();

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(3)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .ProcessAllPartitionsTogether()
                        .Configure(
                            config =>
            {
                config.GroupId          = "consumer1";
                config.EnableAutoCommit = false;
                config.CommitOffsetEach = 1;
            })))
                .AddDelegateSubscriber(
                    async (IMessageStreamEnumerable <TestEventOne> eventsStream) =>
            {
                receivedStreams.Add(eventsStream);
                await foreach (var message in eventsStream)
                {
                    receivedMessages.Add(message);
                }
            }))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 15; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            receivedStreams.Should().HaveCount(1);
            receivedMessages.Should().HaveCount(15);
            receivedMessages.Select(message => message.Content)
            .Should().BeEquivalentTo(Enumerable.Range(1, 15).Select(i => $"{i}"));

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
        }
Example #2
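        // Disconnecting the broker while observing completes the observable, so the
        // onCompleted callback fires after both messages have been received.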
        public async Task Streaming_DisconnectWhileObserving_ObserverCompleted()
        {
            bool completed        = false;
            var  receivedMessages = new ConcurrentBag <TestEventOne>();

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .AsObservable()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId = "consumer1";
            })))
                .AddDelegateSubscriber(
                    (IMessageStreamObservable <TestEventOne> observable) =>
                    observable.Subscribe(
                        message => receivedMessages.Add(message),
                        () => completed = true)))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 1"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 2"
            });

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            await AsyncTestingUtil.WaitAsync(() => receivedMessages.Count >= 2);

            receivedMessages.Should().HaveCount(2);

            await Helper.Broker.DisconnectAsync();

            completed.Should().BeTrue();
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(2);
        }
Example #3
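        // With CommitOffsetEach = 1 every observed message is committed before the next one
        // is pushed, which is asserted inside the Subscribe callback itself.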
        public async Task Streaming_UnboundedObservable_MessagesReceived()
        {
            var receivedMessages = new List <TestEventOne>();

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .AsObservable()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId          = "consumer1";
                config.EnableAutoCommit = false;
                config.CommitOffsetEach = 1;
            })))
                .AddDelegateSubscriber(
                    (IMessageStreamObservable <TestEventOne> observable) =>
                    observable.Subscribe(
                        message =>
            {
                DefaultTopic.GetCommittedOffsetsCount("consumer1")
                .Should().Be(receivedMessages.Count);

                receivedMessages.Add(message);
            })))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 3; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            receivedMessages.Should().HaveCount(3);
            receivedMessages.Select(message => message.Content)
            .Should().BeEquivalentTo(Enumerable.Range(1, 3).Select(i => $"{i}"));

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
        }
Example #4
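        // The chunked message keeps failing: after 10 retries the skip policy takes over and
        // the offsets of the 3 chunks are committed anyway.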
        public async Task RetryAndSkipPolicies_JsonChunkSequenceStillFailingAfterRetries_OffsetCommitted()
        {
            var tryCount = 0;

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(
                        new KafkaProducerEndpoint(DefaultTopicName)
            {
                Chunk = new ChunkSettings
                {
                    Size = 10
                }
            })
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                },
                ErrorPolicy = ErrorPolicy.Chain(
                    ErrorPolicy.Retry().MaxFailedAttempts(10),
                    ErrorPolicy.Skip())
            }))
                .AddSingletonBrokerBehavior <SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _, IServiceProvider sp) =>
            {
                var logger = sp.GetRequiredService <ISilverbackLogger <ErrorHandlingTests> >();
                tryCount++;
                logger.LogInformation($"Handling message ({tryCount})...");
                throw new InvalidOperationException("Retry!");
            }))
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Hello E2E!"
            });

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            tryCount.Should().Be(11);
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
        }
Example #5
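        // Producing and consuming keep working across a partition rebalance; the offsets of
        // all 10 messages end up committed.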
        public async Task Rebalance_DefaultSettings_ProducedAndConsumedAfterRebalance()
        {
            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId = "consumer1",
                    AutoCommitIntervalMs = 100
                }
            }))
                .AddSingletonSubscriber <OutboundInboundSubscriber>())
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 5; i++)
            {
                await publisher.PublishAsync(
                    new TestEventOne
                {
                    Content = $"{i}"
                });
            }

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            Subscriber.OutboundEnvelopes.Should().HaveCount(5);
            Subscriber.InboundEnvelopes.Should().HaveCount(5);

            DefaultTopic.Rebalance();

            for (int i = 1; i <= 5; i++)
            {
                await publisher.PublishAsync(
                    new TestEventOne
                {
                    Content = $"{i}"
                });
            }

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            Subscriber.OutboundEnvelopes.Should().HaveCount(10);
            Subscriber.InboundEnvelopes.Should().HaveCount(10);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);
        }
Example #6
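        // With CommitOffsetEach = 10 nothing is committed while only 3 messages are consumed;
        // DisconnectAsync flushes the pending offsets before shutting down.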
        public async Task DisconnectAsync_WithoutAutoCommit_PendingOffsetsCommitted()
        {
            int receivedMessages = 0;

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId          = "consumer1";
                config.EnableAutoCommit = false;
                config.CommitOffsetEach = 10;
            })))
                .AddDelegateSubscriber((TestEventOne _) => receivedMessages++))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "one"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "two"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "three"
            });

            await AsyncTestingUtil.WaitAsync(() => receivedMessages == 3);

            await Helper.Broker.DisconnectAsync();

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
        }
Example #7
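        // Fluent-configuration variant of the rebalance scenario: produce/consume keeps
        // working after DefaultTopic.Rebalance() and all 10 offsets are committed.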
        public async Task Rebalance_DefaultSettings_ProducedAndConsumedAfterRebalance()
        {
            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(config => { config.BootstrapServers = "PLAINTEXT://e2e"; })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(config => { config.GroupId = "consumer1"; })))
                .AddIntegrationSpyAndSubscriber())
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 5; i++)
            {
                await publisher.PublishAsync(
                    new TestEventOne
                {
                    Content = $"{i}"
                });
            }

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            Helper.Spy.OutboundEnvelopes.Should().HaveCount(5);
            Helper.Spy.InboundEnvelopes.Should().HaveCount(5);

            DefaultTopic.Rebalance();

            for (int i = 1; i <= 5; i++)
            {
                await publisher.PublishAsync(
                    new TestEventOne
                {
                    Content = $"{i}"
                });
            }

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            Helper.Spy.OutboundEnvelopes.Should().HaveCount(10);
            Helper.Spy.InboundEnvelopes.Should().HaveCount(10);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);
        }
Example #8
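        // Two inbound endpoints sharing the same consumer group: the 10 messages are split
        // between the consumers, but each one is consumed exactly once.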
        public async Task OutboundAndInbound_MultipleConsumersSameConsumerGroup_ProducedAndConsumed()
        {
            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId = "consumer1",
                    AutoCommitIntervalMs = 100
                }
            })
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId = "consumer1",
                    AutoCommitIntervalMs = 100
                }
            }))
                .AddSingletonBrokerBehavior <SpyBrokerBehavior>()
                .AddSingletonSubscriber <OutboundInboundSubscriber>())
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 10; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            Subscriber.OutboundEnvelopes.Should().HaveCount(10);
            Subscriber.InboundEnvelopes.Should().HaveCount(10);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);

            SpyBehavior.OutboundEnvelopes.Should().HaveCount(10);
            SpyBehavior.InboundEnvelopes.Should().HaveCount(10);
            SpyBehavior.InboundEnvelopes
            .Select(envelope => ((TestEventOne)envelope.Message!).Content)
            .Distinct()
            .Should().BeEquivalentTo(Enumerable.Range(1, 10).Select(i => $"{i}"));
        }
Example #9
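        // With IgnoreUnhandledMessages() the TestEventTwo, which has no subscriber, is
        // silently skipped while its offset is still committed.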
        public async Task Inbound_IgnoreUnhandledMessages_UnhandledMessageIgnored()
        {
            var received = 0;

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId = "consumer1";
            })
                        .IgnoreUnhandledMessages()))
                .AddDelegateSubscriber((TestEventOne _) => received++))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Handled message"
            });

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            received.Should().Be(1);

            await publisher.PublishAsync(
                new TestEventTwo
            {
                Content = "Unhandled message"
            });

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            received.Should().Be(1);
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(2);
        }
Example #10
        // A message failing validation throws an exception: it is never handled, its offset
        // is not committed and the consumer ends up disconnected.
        public async Task Validation_ThrowException_InvalidMessageNotConsumed()
        {
            bool received = false;

            Host.ConfigureServices(
                services =>
            {
                services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
                {
                    config.BootstrapServers = "PLAINTEXT://tests";
                })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound <TestValidationMessage>(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .ValidateMessage(true)
                        .Configure(
                            config =>
                {
                    config.GroupId = "consumer1";
                })))
                .AddDelegateSubscriber(
                    (IInboundEnvelope _) =>
                {
                    received = true;
                })
                .AddIntegrationSpyAndSubscriber();
            })
            .Run();

            var producer = Helper.Broker.GetProducer(DefaultTopicName);

            await producer.ProduceAsync(Encoding.UTF8.GetBytes("{\"String10\": \"1234567890abcd\"}"));

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            Helper.Spy.OutboundEnvelopes.Should().HaveCount(1);
            Helper.Spy.InboundEnvelopes.Should().HaveCount(0);
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(0);
            received.Should().BeFalse();

            await AsyncTestingUtil.WaitAsync(() => Helper.Broker.Consumers[0].IsConnected == false);

            Helper.Broker.Consumers[0].IsConnected.Should().BeFalse();
        }
Example #11
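        // Runs both with and without auto-commit (CommitOffsetEach = 3 when auto-commit is
        // disabled); in either mode all 3 offsets end up committed.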
        public async Task Inbound_WithAndWithoutAutoCommit_OffsetCommitted(bool enableAutoCommit)
        {
            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId          = "consumer1";
                config.EnableAutoCommit = enableAutoCommit;
                config.CommitOffsetEach = enableAutoCommit ? -1 : 3;
            })))
                .AddIntegrationSpyAndSubscriber())
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "one"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "two"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "three"
            });

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
        }
Example #12
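        // Messages are AES-encrypted by the outbound endpoint (the raw payload is a
        // SymmetricEncryptStream) and transparently decrypted on the inbound endpoint.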
        public async Task Encryption_SimpleMessages_EncryptedAndDecrypted()
        {
            var message1 = new TestEventOne {
                Content = "Message 1"
            };
            var message2 = new TestEventOne {
                Content = "Message 2"
            };

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(config => { config.BootstrapServers = "PLAINTEXT://e2e"; })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint
                        .ProduceTo(DefaultTopicName)
                        .EncryptUsingAes(AesEncryptionKey))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .DecryptUsingAes(AesEncryptionKey)
                        .Configure(
                            config =>
            {
                config.GroupId = "consumer1";
            })))
                .AddIntegrationSpyAndSubscriber())
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(message1);

            await publisher.PublishAsync(message2);

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            Helper.Spy.OutboundEnvelopes.Should().HaveCount(2);
            Helper.Spy.OutboundEnvelopes[0].RawMessage.Should().BeOfType <SymmetricEncryptStream>();
            Helper.Spy.OutboundEnvelopes[1].RawMessage.Should().BeOfType <SymmetricEncryptStream>();

            Helper.Spy.InboundEnvelopes.Should().HaveCount(2);
            Helper.Spy.InboundEnvelopes[0].Message.Should().BeEquivalentTo(message1);
            Helper.Spy.InboundEnvelopes[1].Message.Should().BeEquivalentTo(message2);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(2);
        }
Example #13
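        // With the default per-partition processing the 3 partitions are consumed as 3
        // independent streams, one per partition.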
        public async Task Streaming_ProcessingPartitionsIndependently_PublishedStreamPerPartition()
        {
            var receivedMessages = new ConcurrentBag <TestEventOne>();
            var receivedStreams  = new ConcurrentBag <IMessageStreamEnumerable <TestEventOne> >();

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(3)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                }
            }))
                .AddDelegateSubscriber(
                    async (IMessageStreamEnumerable <TestEventOne> eventsStream) =>
            {
                receivedStreams.Add(eventsStream);
                await foreach (var message in eventsStream)
                {
                    receivedMessages.Add(message);
                }
            }))
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 15; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            receivedStreams.Should().HaveCount(3);
            receivedMessages.Should().HaveCount(15);
            receivedMessages.Select(message => message.Content)
            .Should().BeEquivalentTo(Enumerable.Range(1, 15).Select(i => $"{i}"));

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
        }
Example #14
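        // Auto-commit is disabled and CommitOffsetEach = 10: the rebalance forces the 3
        // pending offsets to be committed.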
        public async Task Rebalance_WithoutAutoCommit_PendingOffsetsCommitted()
        {
            int receivedMessages = 0;

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration =
                {
                    GroupId              = "consumer1",
                    EnableAutoCommit     = false,
                    AutoCommitIntervalMs = 50,
                    CommitOffsetEach     = 10
                }
            }))
                .AddDelegateSubscriber((TestEventOne _) => receivedMessages++))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "one"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "two"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "three"
            });

            await AsyncTestingUtil.WaitAsync(() => receivedMessages == 3);

            DefaultTopic.Rebalance();

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
        }
Example #15
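        // The subscriber throws on the first 2 attempts and succeeds on the third; the retry
        // policy redelivers the message until it is processed and its offset committed.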
        public async Task RetryPolicy_SuccessfulAfterSomeTries_OffsetCommitted()
        {
            var tryCount = 0;

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                },
                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
            }))
                .AddSingletonBrokerBehavior <SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _) =>
            {
                tryCount++;
                if (tryCount != 3)
                {
                    throw new InvalidOperationException("Retry!");
                }
            }))
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Hello E2E!"
            });

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            tryCount.Should().Be(3);
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(1);
        }
Example #16
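        // Same with/without auto-commit scenario, configured through AddEndpoints and an
        // explicit KafkaConsumerEndpoint; all 3 offsets are committed either way.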
        public async Task Inbound_WithAndWithoutAutoCommit_OffsetCommitted(bool enableAutoCommit)
        {
            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId              = "consumer1",
                    EnableAutoCommit     = enableAutoCommit,
                    AutoCommitIntervalMs = 50,
                    CommitOffsetEach     = enableAutoCommit ? -1 : 3
                }
            }))
                .AddSingletonSubscriber <OutboundInboundSubscriber>())
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "one"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "two"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "three"
            });

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(3);
        }
Example #17
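        // AddInbound with 2 consumer instances in the same group: the 10 messages are split
        // between the instances and each one is consumed exactly once.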
        public async Task OutboundAndInbound_MultipleConsumerInstances_ProducedAndConsumed()
        {
            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId = "consumer1";
            }),
                        2))
                .AddIntegrationSpyAndSubscriber())
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 10; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            Helper.Spy.OutboundEnvelopes.Should().HaveCount(10);
            Helper.Spy.InboundEnvelopes.Should().HaveCount(10);
            Helper.Spy.InboundEnvelopes
            .Select(envelope => ((TestEventOne)envelope.Message!).Content)
            .Distinct()
            .Should().BeEquivalentTo(Enumerable.Range(1, 10).Select(i => $"{i}"));

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);
        }
Example #18
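        // The subscriber returns without enumerating the batch stream, so the enumeration is
        // aborted; the offsets are committed anyway and consumption moves on, resulting in
        // 15 single-message batches.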
        public async Task Batch_StreamEnumerationAborted_CommittedAndNextMessageConsumed()
        {
            var receivedBatches = 0;

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                },
                Batch = new BatchSettings
                {
                    Size = 10
                }
            }))
                .AddDelegateSubscriber(
                    (IMessageStreamEnumerable <TestEventOne> eventsStream) => { receivedBatches++; }))
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 15; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await DefaultTopic.WaitUntilAllMessagesAreConsumedAsync();

            receivedBatches.Should().Be(15);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
        }
Example #19
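        // Disconnecting the broker while the subscriber is still enumerating aborts the
        // enumeration, which surfaces as an OperationCanceledException.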
        public async Task Streaming_DisconnectWhileEnumerating_EnumerationAborted()
        {
            bool aborted          = false;
            var  receivedMessages = new List <TestEventOne>();

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId = "consumer1",
                    AutoCommitIntervalMs = 100
                }
            }))
                .AddDelegateSubscriber(
                    (IMessageStreamEnumerable <TestEventOne> eventsStream) =>
            {
                try
                {
                    foreach (var message in eventsStream)
                    {
                        receivedMessages.Add(message);
                    }
                }
                catch (OperationCanceledException)
                {
                    aborted = true;
                }
            }))
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 1"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 2"
            });

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            receivedMessages.Should().HaveCount(2);

            await Broker.DisconnectAsync();

            await AsyncTestingUtil.WaitAsync(() => aborted); // TODO: Necessary?

            aborted.Should().BeTrue();
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(2);
        }
Example #20
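        // Chunked binary files: the first file fails once mid-sequence, the retry policy
        // redelivers it, and both files are finally reassembled with all 6 chunk offsets
        // committed.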
        public async Task RetryPolicy_BinaryFileChunkSequenceProcessedAfterSomeTries_RetriedMultipleTimesAndCommitted()
        {
            var message1 = new BinaryFileMessage
            {
                Content = new MemoryStream(
                    new byte[]
                {
                    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x10,
                    0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x20,
                    0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x30
                }),
                ContentType = "application/pdf"
            };

            var message2 = new BinaryFileMessage
            {
                Content = new MemoryStream(
                    new byte[]
                {
                    0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30,
                    0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x40,
                    0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x50
                }),
                ContentType = "text/plain"
            };

            var tryCount      = 0;
            var receivedFiles = new List <byte[]?>();

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IBinaryFileMessage>(
                        new KafkaProducerEndpoint(DefaultTopicName)
            {
                Chunk = new ChunkSettings
                {
                    Size = 10
                }
            })
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                },
                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
            }))
                .AddDelegateSubscriber(
                    (BinaryFileMessage binaryFile) =>
            {
                if (binaryFile.ContentType != "text/plain")
                {
                    tryCount++;

                    if (tryCount != 2)
                    {
                        // Read only first chunk
                        var buffer = new byte[10];
                        binaryFile.Content!.Read(buffer, 0, 10);
                        throw new InvalidOperationException("Retry!");
                    }
                }

                lock (receivedFiles)
                {
                    receivedFiles.Add(binaryFile.Content.ReadAll());
                }
            })
                .AddSingletonBrokerBehavior <SpyBrokerBehavior>())
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IPublisher>();

            await publisher.PublishAsync(message1);

            await publisher.PublishAsync(message2);

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            tryCount.Should().Be(2);

            SpyBehavior.OutboundEnvelopes.Should().HaveCount(6);
            SpyBehavior.OutboundEnvelopes.ForEach(envelope => envelope.RawMessage.ReReadAll()!.Length.Should().Be(10));
            SpyBehavior.InboundEnvelopes.Should().HaveCount(3);

            SpyBehavior.InboundEnvelopes[0].Message.As <BinaryFileMessage>().ContentType.Should().Be("application/pdf");
            SpyBehavior.InboundEnvelopes[1].Message.As <BinaryFileMessage>().ContentType.Should().Be("application/pdf");
            SpyBehavior.InboundEnvelopes[2].Message.As <BinaryFileMessage>().ContentType.Should().Be("text/plain");

            receivedFiles.Should().HaveCount(2);
            receivedFiles[0].Should().BeEquivalentTo(message1.Content.ReReadAll());
            receivedFiles[1].Should().BeEquivalentTo(message2.Content.ReReadAll());

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(6);
        }
Example #21
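        // Chunked JSON messages: each message fails on its first attempt and succeeds on the
        // retry; the offsets of all 6 chunks are committed.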
        public async Task RetryPolicy_JsonChunkSequenceProcessedAfterSomeTries_RetriedMultipleTimesAndCommitted()
        {
            var tryCount = 0;

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(
                        new KafkaProducerEndpoint(DefaultTopicName)
            {
                Chunk = new ChunkSettings
                {
                    Size = 10
                }
            })
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                },
                ErrorPolicy = ErrorPolicy.Retry().MaxFailedAttempts(10)
            }))
                .AddSingletonBrokerBehavior <SpyBrokerBehavior>()
                .AddDelegateSubscriber(
                    (IIntegrationEvent _) =>
            {
                tryCount++;
                if (tryCount % 2 != 0)
                {
                    throw new InvalidOperationException("Retry!");
                }
            }))
                                  .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Long message one"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Long message two"
            });

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            SpyBehavior.OutboundEnvelopes.Should().HaveCount(6);
            SpyBehavior.OutboundEnvelopes.ForEach(
                envelope =>
            {
                envelope.RawMessage.Should().NotBeNull();
                envelope.RawMessage!.Length.Should().BeLessOrEqualTo(10);
            });

            tryCount.Should().Be(4);
            SpyBehavior.InboundEnvelopes.Should().HaveCount(4);
            SpyBehavior.InboundEnvelopes[0].Message.As <TestEventOne>().Content.Should().Be("Long message one");
            SpyBehavior.InboundEnvelopes[1].Message.As <TestEventOne>().Content.Should().Be("Long message one");
            SpyBehavior.InboundEnvelopes[2].Message.As <TestEventOne>().Content.Should().Be("Long message two");
            SpyBehavior.InboundEnvelopes[3].Message.As <TestEventOne>().Content.Should().Be("Long message two");

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(6);
        }
Example #22
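        // Disconnecting mid-batch aborts the enumeration; the batch never completes, so no
        // offsets are committed.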
        public async Task Batch_DisconnectWhileEnumerating_EnumerationAborted()
        {
            bool aborted          = false;
            var  receivedMessages = new List <TestEventOne>();

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(3)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration =
                {
                    GroupId              = "consumer1",
                    AutoCommitIntervalMs = 100
                },
                Batch = new BatchSettings
                {
                    Size = 10
                }
            }))
                .AddDelegateSubscriber(
                    async (IMessageStreamEnumerable <TestEventOne> eventsStream) =>
            {
                try
                {
                    await foreach (var message in eventsStream)
                    {
                        receivedMessages.Add(message);
                    }
                }
                catch (OperationCanceledException)
                {
                    aborted = true;
                }
            }))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 10; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await AsyncTestingUtil.WaitAsync(() => receivedMessages.Count > 3);

            receivedMessages.Should().HaveCountGreaterThan(3);

            await Broker.DisconnectAsync();

            await AsyncTestingUtil.WaitAsync(() => aborted);

            aborted.Should().BeTrue();
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(0);
        }
Example #23
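        // Disconnect while enumerating a plain IEnumerable<TestEventOne> stream: the
        // enumeration is aborted, but the 2 already-consumed offsets remain committed.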
        public async Task Streaming_DisconnectWhileEnumerating_EnumerationAborted()
        {
            bool aborted          = false;
            var  receivedMessages = new List <TestEventOne>();

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId = "consumer1";
            })))
                .AddDelegateSubscriber(
                    (IEnumerable <TestEventOne> eventsStream) =>
            {
                try
                {
                    foreach (var message in eventsStream)
                    {
                        receivedMessages.Add(message);
                    }
                }
                catch (OperationCanceledException)
                {
                    Task.Delay(300).Wait();
                    aborted = true;
                }
            }))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 1"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 2"
            });

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            receivedMessages.Should().HaveCount(2);

            await Helper.Broker.DisconnectAsync();

            aborted.Should().BeTrue();
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(2);
        }
Example #24
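        // A rebalance aborts the pending 5-message batch; those messages are redelivered in a
        // new batch afterwards and only the offsets of completed batches are committed.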
        public async Task Rebalance_WithPendingBatch_AbortedAndConsumedAfterRebalance()
        {
            var receivedBatches  = new List <List <TestEventOne> >();
            var completedBatches = 0;

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(config => { config.BootstrapServers = "PLAINTEXT://e2e"; })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId          = "consumer1";
                config.EnableAutoCommit = false;
                config.CommitOffsetEach = 1;
            })
                        .EnableBatchProcessing(10)))
                .AddDelegateSubscriber(
                    async (IAsyncEnumerable <TestEventOne> eventsStream) =>
            {
                var list = new List <TestEventOne>();
                receivedBatches.ThreadSafeAdd(list);

                await foreach (var message in eventsStream)
                {
                    list.Add(message);
                }

                Interlocked.Increment(ref completedBatches);
            }))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 15; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await AsyncTestingUtil.WaitAsync(() => receivedBatches.Sum(batch => batch.Count) == 15);

            receivedBatches.Should().HaveCount(2);
            receivedBatches[0].Should().HaveCount(10);
            receivedBatches[1].Should().HaveCount(5);
            completedBatches.Should().Be(1);
            receivedBatches.Sum(batch => batch.Count).Should().Be(15);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);

            DefaultTopic.Rebalance();

            await AsyncTestingUtil.WaitAsync(() => receivedBatches.Sum(batch => batch.Count) == 20);

            receivedBatches.Should().HaveCount(3);
            receivedBatches[0].Should().HaveCount(10);
            receivedBatches[1].Should().HaveCount(5);
            receivedBatches[2].Should().HaveCount(5);
            completedBatches.Should().Be(1);
            receivedBatches.Sum(batch => batch.Count).Should().Be(20);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);

            for (int i = 16; i <= 20; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            receivedBatches.Should().HaveCount(3);
            receivedBatches[0].Should().HaveCount(10);
            receivedBatches[1].Should().HaveCount(5);
            receivedBatches[2].Should().HaveCount(10);
            completedBatches.Should().Be(2);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(20);
        }
Example #25
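        // Single-chunk binary files consumed in batches of 5; inside the loop the test also
        // asserts that offsets are committed only once the previous batch has completed.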
        public async Task Chunking_SingleChunkBinaryFileConsumedInBatch_ProducedAndConsumed()
        {
            var     batches            = new List <List <string?> >();
            string? failedCommit       = null;
            string? enumerationAborted = null;

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(config => { config.BootstrapServers = "PLAINTEXT://e2e"; })
                    .AddOutbound <IBinaryFileMessage>(
                        endpoint => endpoint
                        .ProduceTo(DefaultTopicName)
                        .EnableChunking(50))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .EnableBatchProcessing(5)
                        .Configure(
                            config =>
            {
                config.GroupId          = "consumer1";
                config.EnableAutoCommit = false;
                config.CommitOffsetEach = 1;
            })))
                .AddDelegateSubscriber(
                    async (IAsyncEnumerable <BinaryFileMessage> streamEnumerable) =>
            {
                var list = new List <string?>();
                batches.ThreadSafeAdd(list);

                await foreach (var message in streamEnumerable)
                {
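                    // While enumerating, check that only the offsets of previously completed batches (5 per batch) have been committed so far.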
                    var actualCommittedOffsets =
                        DefaultTopic.GetCommittedOffsetsCount("consumer1");
                    var expectedCommittedOffsets = 5 * (batches.Count - 1);

                    if (actualCommittedOffsets != expectedCommittedOffsets)
                    {
                        failedCommit ??=
                            $"{actualCommittedOffsets} != {expectedCommittedOffsets} " +
                            $"({batches.Count}.{list.Count})";
                    }

                    var readAll = await message.Content.ReadAllAsync();
                    list.Add(readAll != null ? Encoding.UTF8.GetString(readAll) : null);
                }

                if (list.Count != 5)
                {
                    enumerationAborted ??=
                        $"Enumeration completed after {list.Count} messages " +
                        $"({batches.Count}.{list.Count})";
                }
            }))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IPublisher>();

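            // Publish 15 small binary files; each payload fits within the 50-byte chunk size, so every message is produced as a single chunk.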
            for (int i = 1; i <= 15; i++)
            {
                await publisher.PublishAsync(
                    new BinaryFileMessage(Encoding.UTF8.GetBytes($"Long message {i}")));
            }

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            failedCommit.Should().BeNull();
            enumerationAborted.Should().BeNull();

            batches.Should().HaveCount(3);
            batches[0].Should().HaveCount(5);
            batches[1].Should().HaveCount(5);
            batches[2].Should().HaveCount(5);

            batches[0][0].Should().Be("Long message 1");
            batches[0][1].Should().Be("Long message 2");
            batches[0][2].Should().Be("Long message 3");
            batches[0][3].Should().Be("Long message 4");
            batches[0][4].Should().Be("Long message 5");

            batches[1][0].Should().Be("Long message 6");
            batches[1][1].Should().Be("Long message 7");
            batches[1][2].Should().Be("Long message 8");
            batches[1][3].Should().Be("Long message 9");
            batches[1][4].Should().Be("Long message 10");

            batches[2][0].Should().Be("Long message 11");
            batches[2][1].Should().Be("Long message 12");
            batches[2][2].Should().Be("Long message 13");
            batches[2][3].Should().Be("Long message 14");
            batches[2][4].Should().Be("Long message 15");

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
        }
Example #26
        public async Task OutboundAndInbound_MqttToKafka_ProducedAndConsumed()
        {
            int eventOneCount = 0;
            int eventTwoCount = 0;

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options
                    .AddMockedMqtt()
                    .AddMockedKafka())
                .AddMqttEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config => config
                        .WithClientId("e2e-test")
                        .ConnectViaTcp("e2e-mqtt-broker"))
                    .AddOutbound <TestEventOne>(endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(endpoint => endpoint.ConsumeFrom(DefaultTopicName)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(config => { config.BootstrapServers = "PLAINTEXT://tests"; })
                    .AddOutbound <TestEventTwo>(endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId = "consumer1";
            })))
                .AddDelegateSubscriber(
                    (TestEventOne eventOne) =>
            {
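                // Each event received over MQTT is re-published as a TestEventTwo, which is then routed to the Kafka outbound endpoint.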
                Interlocked.Increment(ref eventOneCount);
                return new TestEventTwo
                {
                    Content = eventOne.Content
                };
            })
                .AddDelegateSubscriber((TestEventTwo _) => Interlocked.Increment(ref eventTwoCount)))
            .Run();

            var publisher         = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            var mqttTestingHelper = Host.ServiceProvider.GetRequiredService <IMqttTestingHelper>();

            await mqttTestingHelper.WaitUntilConnectedAsync();

            for (int i = 1; i <= 15; i++)
            {
                await publisher.PublishAsync(
                    new TestEventOne
                {
                    Content = $"{i}"
                });
            }

            await AsyncTestingUtil.WaitAsync(() => eventOneCount >= 15);

            await AsyncTestingUtil.WaitAsync(() => eventTwoCount >= 15);

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            eventOneCount.Should().Be(15);
            eventTwoCount.Should().Be(15);
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
        }
Example #27
        public async Task Streaming_UnboundedEnumerableProcessingFailed_ConsumerStopped()
        {
            var receivedMessages = new List <TestEventOne>();

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .AsObservable()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddKafkaEndpoints(
                    endpoints => endpoints
                    .Configure(
                        config =>
            {
                config.BootstrapServers = "PLAINTEXT://e2e";
            })
                    .AddOutbound <IIntegrationEvent>(
                        endpoint => endpoint.ProduceTo(DefaultTopicName))
                    .AddInbound(
                        endpoint => endpoint
                        .ConsumeFrom(DefaultTopicName)
                        .Configure(
                            config =>
            {
                config.GroupId          = "consumer1";
                config.EnableAutoCommit = false;
                config.CommitOffsetEach = 1;
            })))
                .AddDelegateSubscriber(
                    async(IAsyncEnumerable <TestEventOne> enumerable) =>
            {
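                // Enumerate the unbounded stream and fail on the second message: the exception is expected to stop the consumer.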
                await foreach (var message in enumerable)
                {
                    receivedMessages.Add(message);
                    if (receivedMessages.Count == 2)
                    {
                        throw new InvalidOperationException("Test");
                    }
                }
            }))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 1"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 2"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 3"
            });

            await Helper.WaitUntilAllMessagesAreConsumedAsync();

            await AsyncTestingUtil.WaitAsync(() => receivedMessages.Count >= 2);

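            // The exception stops processing: only the first offset was committed and the consumer should now be disconnected.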
            receivedMessages.Should().HaveCount(2);
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(1);
            Helper.Broker.Consumers[0].IsConnected.Should().BeFalse();
        }
Example #28
        public async Task Streaming_UnboundedObservableProcessingFailed_ConsumerStopped()
        {
            var receivedMessages = new List <TestEventOne>();

            var serviceProvider = Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .AsObservable()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration = new KafkaConsumerConfig
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                }
            }))
                .AddDelegateSubscriber(
                    (IMessageStreamObservable <TestEventOne> observable) =>
                    observable.Subscribe(
                        message =>
            {
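                // Observe the stream and throw on the second message; the consumer is expected to stop, mirroring the enumerable variant above.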
                receivedMessages.Add(message);

                if (receivedMessages.Count == 2)
                {
                    throw new InvalidOperationException("Test");
                }
            })))
            .Run();

            var publisher = serviceProvider.GetRequiredService <IEventPublisher>();
            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 1"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 2"
            });

            await publisher.PublishAsync(
                new TestEventOne
            {
                Content = "Message 3"
            });

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            await AsyncTestingUtil.WaitAsync(() => receivedMessages.Count >= 2);

            receivedMessages.Should().HaveCount(2);
            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(1);
            Broker.Consumers[0].IsConnected.Should().BeFalse();
        }
Example #29
        public async Task Batch_WithTimeout_IncompleteBatchCompletedAfterTimeout()
        {
            var receivedBatches  = new List <List <TestEventOne> >();
            var completedBatches = 0;

            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration =
                {
                    GroupId          = "consumer1",
                    EnableAutoCommit = false,
                    CommitOffsetEach = 1
                },
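                // Complete a batch after 10 messages or, when fewer arrive, after the 500 ms MaxWaitTime.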
                Batch = new BatchSettings
                {
                    Size        = 10,
                    MaxWaitTime = TimeSpan.FromMilliseconds(500)
                }
            }))
                .AddDelegateSubscriber(
                    async(IMessageStreamEnumerable <TestEventOne> eventsStream) =>
            {
                var list = new List <TestEventOne>();
                receivedBatches.Add(list);

                await foreach (var message in eventsStream)
                {
                    list.Add(message);
                }

                completedBatches++;
            }))
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

            for (int i = 1; i <= 15; i++)
            {
                await publisher.PublishAsync(new TestEventOne { Content = $"{i}" });
            }

            await AsyncTestingUtil.WaitAsync(() => receivedBatches.Sum(batch => batch.Count) == 15);

            receivedBatches.Should().HaveCount(2);
            receivedBatches[0].Should().HaveCount(10);
            receivedBatches[1].Should().HaveCount(5);
            completedBatches.Should().Be(1);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(10);

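            // Once the 500 ms timeout elapses, the incomplete batch of 5 is completed as well and its offsets are committed.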
            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            receivedBatches.Should().HaveCount(2);
            receivedBatches[0].Should().HaveCount(10);
            receivedBatches[1].Should().HaveCount(5);
            completedBatches.Should().Be(2);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
        }
Example #30
        public async Task ExactlyOnce_InMemoryInboundLog_DuplicatedMessagesIgnored()
        {
            Host.ConfigureServices(
                services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options
                    .AddMockedKafka(mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1))
                    .AddInMemoryInboundLog())
                .AddEndpoints(
                    endpoints => endpoints
                    .AddOutbound <IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                    .AddInbound(
                        new KafkaConsumerEndpoint(DefaultTopicName)
            {
                Configuration =
                {
                    GroupId              = "consumer1",
                    AutoCommitIntervalMs = 100
                },
                ExactlyOnceStrategy = ExactlyOnceStrategy.Log()
            }))
                .AddSingletonSubscriber <OutboundInboundSubscriber>())
            .Run();

            var publisher = Host.ScopedServiceProvider.GetRequiredService <IEventPublisher>();

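            // Publish six events in total; the two repeats with UniqueKey "1" and "2" should be filtered out by the in-memory inbound log.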
            await publisher.PublishAsync(new TestEventOne());

            await publisher.PublishAsync(
                new TestEventWithUniqueKey
            {
                UniqueKey = "1"
            });

            await publisher.PublishAsync(
                new TestEventWithUniqueKey
            {
                UniqueKey = "2"
            });

            await publisher.PublishAsync(
                new TestEventWithUniqueKey
            {
                UniqueKey = "1"
            });

            await publisher.PublishAsync(
                new TestEventWithUniqueKey
            {
                UniqueKey = "2"
            });

            await publisher.PublishAsync(new TestEventOne());

            await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

            Subscriber.InboundEnvelopes.Should().HaveCount(4);

            Subscriber.InboundEnvelopes[0].Message.Should().BeOfType <TestEventOne>();
            Subscriber.InboundEnvelopes[1].Message.Should().BeOfType <TestEventWithUniqueKey>();
            Subscriber.InboundEnvelopes[1].Message.As <TestEventWithUniqueKey>().UniqueKey.Should().Be("1");
            Subscriber.InboundEnvelopes[2].Message.Should().BeOfType <TestEventWithUniqueKey>();
            Subscriber.InboundEnvelopes[2].Message.As <TestEventWithUniqueKey>().UniqueKey.Should().Be("2");
            Subscriber.InboundEnvelopes[3].Message.Should().BeOfType <TestEventOne>();

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(6);
        }