public async Task Retention_ProducedLotOfMessages_FirstMessagesRemoved()
{
    var serviceProvider = Host.ConfigureServices(
            services => services
                .AddLogging()
                .AddSilverback()
                .UseModel()
                .WithConnectionToMessageBroker(
                    options => options.AddMockedKafka(
                        mockedKafkaOptions => mockedKafkaOptions.WithDefaultPartitionsCount(1)))
                .AddEndpoints(
                    endpoints => endpoints
                        .AddOutbound<IIntegrationEvent>(new KafkaProducerEndpoint(DefaultTopicName))
                        .AddInbound(
                            new KafkaConsumerEndpoint(DefaultTopicName)
                            {
                                Configuration = new KafkaConsumerConfig
                                {
                                    GroupId = "consumer1",
                                    AutoCommitIntervalMs = 100
                                }
                            }))
                .AddSingletonBrokerBehavior<SpyBrokerBehavior>()
                .AddSingletonSubscriber<OutboundInboundSubscriber>())
        .Run();

    var publisher = serviceProvider.GetRequiredService<IEventPublisher>();

    // Produce 100 messages and wait until they have all been consumed.
    for (int i = 1; i <= 100; i++)
    {
        await publisher.PublishAsync(new TestEventOne());
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.InboundEnvelopes.Should().HaveCount(100);
    DefaultTopic.GetFirstOffset(new Partition(0)).Should().Be(new Offset(0));
    DefaultTopic.GetLastOffset(new Partition(0)).Should().Be(new Offset(99));

    // Produce 10 more messages: retention removes the oldest 10, so the first
    // available offset advances to 10 while the last offset reaches 109.
    for (int i = 1; i <= 10; i++)
    {
        await publisher.PublishAsync(new TestEventOne());
    }

    await KafkaTestingHelper.WaitUntilAllMessagesAreConsumedAsync();

    Subscriber.InboundEnvelopes.Should().HaveCount(110);
    DefaultTopic.GetFirstOffset(new Partition(0)).Should().Be(new Offset(10));
    DefaultTopic.GetLastOffset(new Partition(0)).Should().Be(new Offset(109));
}
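The offset arithmetic the test asserts can be reproduced with a minimal, self-contained sketch (this is not Silverback code): a bounded partition that evicts the oldest entry once its capacity is exceeded, so the first offset advances while the last offset keeps growing. The BoundedPartition type and the capacity of 100 are assumptions chosen to match the offsets asserted above.

public class BoundedPartition<T>
{
    private readonly int _capacity;
    private readonly System.Collections.Generic.Queue<T> _messages = new();
    private long _firstOffset;
    private long _nextOffset;

    public BoundedPartition(int capacity) => _capacity = capacity;

    public void Append(T message)
    {
        _messages.Enqueue(message);
        _nextOffset++;

        // Evict the oldest message once the retention capacity is exceeded,
        // advancing the first available offset.
        if (_messages.Count > _capacity)
        {
            _messages.Dequeue();
            _firstOffset++;
        }
    }

    public long GetFirstOffset() => _firstOffset;
    public long GetLastOffset() => _nextOffset - 1;
}

After appending 110 items to a BoundedPartition<string>(100), GetFirstOffset() returns 10 and GetLastOffset() returns 109, mirroring the assertions at the end of the test.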