public async Task NullMessage_WithoutTypeHeaderAndUsingDefaultSerializer_TombstoneReceived()
{
    // Arrange: mocked Kafka broker, default serializer on both endpoints.
    Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                .AddKafkaEndpoints(
                    endpoints => endpoints
                        .Configure(config => { config.BootstrapServers = "PLAINTEXT://tests"; })
                        .AddOutbound<IIntegrationMessage>(
                            endpoint => endpoint.ProduceTo(DefaultTopicName))
                        .AddInbound(
                            endpoint => endpoint
                                .Configure(config => config.GroupId = "group1")
                                .ConsumeFrom(DefaultTopicName)))
                .AddIntegrationSpyAndSubscriber())
        .Run();

    // Act: produce a raw null payload carrying only a message id header
    // (deliberately no message-type header).
    var rawProducer = Helper.Broker.GetProducer(DefaultTopicName);
    await rawProducer.RawProduceAsync(
        (byte[]?)null,
        new MessageHeaderCollection
        {
            { DefaultMessageHeaders.MessageId, "42" }
        });

    await DefaultTopic.WaitUntilAllMessagesAreConsumedAsync();

    // Assert: the null payload surfaces as a single Tombstone preserving the id.
    Helper.Spy.InboundEnvelopes.Should().HaveCount(1);
    Helper.Spy.InboundEnvelopes[0].Message.Should().BeAssignableTo<Tombstone>();
    Helper.Spy.InboundEnvelopes[0].Message.As<Tombstone>().MessageId.Should().Be("42");
}
        public async Task Tombstone_WithoutTypeParameter_ProducedAndConsumed()
        {
            // Arrange: route the non-generic Tombstone type through a mocked Kafka broker.
            Host.ConfigureServices(
                    services => services
                        .AddLogging()
                        .AddSilverback()
                        .UseModel()
                        .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                        .AddKafkaEndpoints(
                            endpoints => endpoints
                                .Configure(config => { config.BootstrapServers = "PLAINTEXT://tests"; })
                                .AddOutbound<Tombstone>(endpoint => endpoint.ProduceTo(DefaultTopicName))
                                .AddInbound(
                                    endpoint => endpoint
                                        .Configure(config => config.GroupId = "group1")
                                        .ConsumeFrom(DefaultTopicName)))
                        .AddIntegrationSpyAndSubscriber())
                .Run();

            // Act: publish a tombstone through the regular publisher pipeline.
            var eventPublisher = Host.ScopedServiceProvider.GetRequiredService<IPublisher>();
            await eventPublisher.PublishAsync(new Tombstone("42"));

            await DefaultTopic.WaitUntilAllMessagesAreConsumedAsync();

            // Assert: the wire payload is null, yet the consumed message is a Tombstone
            // that still carries the original message id.
            Helper.Spy.InboundEnvelopes.Should().HaveCount(1);
            Helper.Spy.InboundEnvelopes[0].RawMessage.Should().BeNull();
            Helper.Spy.InboundEnvelopes[0].Message.Should().BeAssignableTo<Tombstone>();
            Helper.Spy.InboundEnvelopes[0].Message.As<Tombstone>().MessageId.Should().Be("42");
        }
        public async Task NullMessage_SilentlySkippingNullMessages_NoMessageReceived()
        {
            // Arrange: inbound endpoint configured with SkipNullMessages, so null
            // payloads must be dropped before reaching subscribers.
            Host.ConfigureServices(
                    services => services
                        .AddLogging()
                        .AddSilverback()
                        .UseModel()
                        .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                        .AddKafkaEndpoints(
                            endpoints => endpoints
                                .Configure(config => { config.BootstrapServers = "PLAINTEXT://tests"; })
                                .AddOutbound<IIntegrationMessage>(
                                    endpoint => endpoint.ProduceTo(DefaultTopicName))
                                .AddInbound(
                                    endpoint => endpoint
                                        .Configure(config => config.GroupId = "group1")
                                        .ConsumeFrom(DefaultTopicName)
                                        .SkipNullMessages()))
                        .AddIntegrationSpyAndSubscriber())
                .Run();

            // Act: produce a raw null payload, this time including a type header too.
            var rawProducer = Helper.Broker.GetProducer(DefaultTopicName);
            await rawProducer.RawProduceAsync(
                (byte[]?)null,
                new MessageHeaderCollection
                {
                    { DefaultMessageHeaders.MessageType, typeof(TestEventOne).AssemblyQualifiedName },
                    { DefaultMessageHeaders.MessageId, "42" }
                });

            await DefaultTopic.WaitUntilAllMessagesAreConsumedAsync();

            // Assert: the raw envelope was observed, but nothing was forwarded inbound.
            Helper.Spy.RawInboundEnvelopes.Should().HaveCount(1);
            Helper.Spy.InboundEnvelopes.Should().HaveCount(0);
        }
        public async Task NullMessage_HandleViaCustomSerializer_CustomWrapperReceived()
        {
            // Arrange: inbound endpoint deserializes via a custom serializer that
            // wraps the raw payload instead of producing a Tombstone.
            Host.ConfigureServices(
                    services => services
                        .AddLogging()
                        .AddSilverback()
                        .UseModel()
                        .WithConnectionToMessageBroker(options => options.AddMockedKafka())
                        .AddKafkaEndpoints(
                            endpoints => endpoints
                                .Configure(config => { config.BootstrapServers = "PLAINTEXT://tests"; })
                                .AddOutbound<IIntegrationMessage>(
                                    endpoint => endpoint.ProduceTo(DefaultTopicName))
                                .AddInbound(
                                    endpoint => endpoint
                                        .Configure(config => config.GroupId = "group1")
                                        .ConsumeFrom(DefaultTopicName)
                                        .DeserializeUsing(new CustomSerializer())))
                        .AddIntegrationSpyAndSubscriber())
                .Run();

            // Act: produce a raw null payload with no headers at all.
            var rawProducer = Helper.Broker.GetProducer(DefaultTopicName);
            await rawProducer.RawProduceAsync((byte[]?)null);

            await DefaultTopic.WaitUntilAllMessagesAreConsumedAsync();

            // Assert: the custom serializer materialized the null payload as its
            // RawMessage wrapper with a null Content.
            Helper.Spy.RawInboundEnvelopes.Should().HaveCount(1);
            Helper.Spy.InboundEnvelopes.Should().HaveCount(1);
            Helper.Spy.InboundEnvelopes[0].Message.Should().BeOfType<CustomSerializer.RawMessage>();
            Helper.Spy.InboundEnvelopes[0].Message.As<CustomSerializer.RawMessage>().Content.Should()
                .BeNull();
        }
        // Example #5
        public async Task Batch_StreamEnumerationAborted_CommittedAndNextMessageConsumed()
        {
            // Arrange: single-partition mocked Kafka, batches of up to 10, manual
            // commits after every offset; the subscriber never enumerates the
            // stream, so each batch enumeration is aborted immediately.
            var batchCounter = 0;

            var provider = Host.ConfigureServices(
                    services => services
                        .AddLogging()
                        .AddSilverback()
                        .UseModel()
                        .WithConnectionToMessageBroker(
                            options => options.AddMockedKafka(
                                mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                        .AddEndpoints(
                            endpoints => endpoints
                                .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                                .AddInbound(
                                    new KafkaConsumerEndpoint(DefaultTopicName)
                                    {
                                        Configuration = new KafkaConsumerConfig
                                        {
                                            GroupId = "consumer1",
                                            EnableAutoCommit = false,
                                            CommitOffsetEach = 1
                                        },
                                        Batch = new BatchSettings
                                        {
                                            Size = 10
                                        }
                                    }))
                        .AddDelegateSubscriber(
                            (IMessageStreamEnumerable<TestEventOne> eventsStream) => { batchCounter++; }))
                .Run();

            // Act: publish 15 events one by one.
            var eventPublisher = provider.GetRequiredService<IEventPublisher>();

            for (var messageNumber = 1; messageNumber <= 15; messageNumber++)
            {
                await eventPublisher.PublishAsync(new TestEventOne { Content = $"{messageNumber}" });
            }

            await DefaultTopic.WaitUntilAllMessagesAreConsumedAsync();

            // Assert: every aborted enumeration still commits, so each message
            // opened (and closed) its own batch.
            batchCounter.Should().Be(15);

            DefaultTopic.GetCommittedOffsetsCount("consumer1").Should().Be(15);
        }