public void IsSerializable()
{
    var endpoint1 = new KafkaConsumerEndpoint("endpoint")
    {
        Configuration = new KafkaConsumerConfig
        {
            CommitOffsetEach = 5,
            AutoCommitIntervalMs = 1000
        }
    };

    var json = JsonConvert.SerializeObject(
        endpoint1,
        new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto });
    var endpoint2 = JsonConvert.DeserializeObject<KafkaConsumerEndpoint>(
        json,
        new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto });

    endpoint2.Should().NotBeNull();
    endpoint2.Configuration.AutoCommitIntervalMs.Should().Be(endpoint1.Configuration.AutoCommitIntervalMs);
    endpoint2.Configuration.CommitOffsetEach.Should().Be(endpoint1.Configuration.CommitOffsetEach);
}
public void Constructor_MultipleTopics_TopicsSet()
{
    var endpoint = new KafkaConsumerEndpoint("topic1", "topic2");

    endpoint.Name.Should().Be("[topic1,topic2]");
    endpoint.Names.Should().BeEquivalentTo("topic1", "topic2");
    endpoint.TopicPartitions.Should().BeNull();
}
public void Constructor_SingleTopic_TopicSet()
{
    var endpoint = new KafkaConsumerEndpoint("topic");

    endpoint.Name.Should().Be("topic");
    endpoint.Names.Should().BeEquivalentTo("topic");
    endpoint.TopicPartitions.Should().BeNull();
}
public void Constructor_SingleTopicPartition_TopicsSet()
{
    var endpoint = new KafkaConsumerEndpoint(new TopicPartition("topic", 2));

    endpoint.Name.Should().Be("topic");
    endpoint.Names.Should().BeEquivalentTo("topic");
    endpoint.TopicPartitions.Should().BeEquivalentTo(
        new[] { new TopicPartitionOffset("topic", 2, Offset.Unset) });
}
public void GetConsumer_ReturnsNewInMemoryConsumer()
{
    var endpoint = new KafkaConsumerEndpoint("test");

    var consumer = _serviceProvider.GetRequiredService<IBroker>().GetConsumer(endpoint);

    consumer.Should().NotBeNull();
    consumer.Should().BeOfType<InMemoryConsumer>();
}
public void Validate_InvalidConfiguration_ExceptionThrown()
{
    var endpoint = new KafkaConsumerEndpoint("topic")
    {
        Configuration = new KafkaConsumerConfig()
    };

    Action act = () => endpoint.Validate();

    act.Should().ThrowExactly<EndpointConfigurationException>();
}
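// Hypothetical counterpart to the test above (not part of the original suite): a sketch of a
// configuration that should pass validation. Judging from the CreateConsumerEndpoint helpers
// further below, at least the bootstrap servers and a group id are expected to be set; the
// exact validation rules are assumed here rather than taken from the Silverback sources.
public void Validate_CompleteConfiguration_NoExceptionThrown()
{
    var endpoint = new KafkaConsumerEndpoint("topic")
    {
        Configuration = new KafkaConsumerConfig
        {
            BootstrapServers = "PLAINTEXT://localhost:9092", // illustrative broker address
            GroupId = "test-group"                           // illustrative group id
        }
    };

    Action act = () => endpoint.Validate();

    act.Should().NotThrow();
}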
public void Equals_SameEndpointInstance_TrueIsReturned()
{
    var endpoint = new KafkaConsumerEndpoint("topic")
    {
        Configuration =
        {
            AutoCommitIntervalMs = 1000
        }
    };

    endpoint.Equals(endpoint).Should().BeTrue();
}
public void Equals_SameEndpointInstance_IsTrue()
{
    var endpoint = new KafkaConsumerEndpoint("endpoint")
    {
        Configuration = new KafkaConsumerConfig
        {
            AutoCommitIntervalMs = 1000
        }
    };

    endpoint.Equals(endpoint).Should().BeTrue();
}
public void Constructor_SingleTopicAndPartitionResolver_TopicsAndResolverSet()
{
    var endpoint = new KafkaConsumerEndpoint(
        "topic1",
        partitions => partitions);

    endpoint.Name.Should().Be("topic1");
    endpoint.Names.Should().BeEquivalentTo("topic1");
    endpoint.TopicPartitions.Should().BeNull();
    endpoint.TopicPartitionsResolver.Should().NotBeNull();
    endpoint.TopicPartitionsResolver.Should()
        .BeOfType<Func<IReadOnlyCollection<TopicPartition>, IEnumerable<TopicPartitionOffset>>>();
}
public void Validate_MissingTopic_ExceptionThrown()
{
    var endpoint = new KafkaConsumerEndpoint(Array.Empty<string>())
    {
        Configuration = new KafkaConsumerConfig
        {
            BootstrapServers = "test-server"
        }
    };

    Action act = () => endpoint.Validate();

    act.Should().ThrowExactly<EndpointConfigurationException>();
}
public void Constructor_MultipleTopicsAndPartitionResolverWithOffsets_TopicsAndResolverSet()
{
    Func<IReadOnlyCollection<TopicPartition>, IEnumerable<TopicPartitionOffset>> resolver =
        partitions => partitions.Select(
            partition => new TopicPartitionOffset(partition, Offset.Beginning));

    var endpoint = new KafkaConsumerEndpoint(
        new[] { "topic1", "topic2" },
        resolver);

    endpoint.Name.Should().Be("[topic1,topic2]");
    endpoint.Names.Should().BeEquivalentTo("topic1", "topic2");
    endpoint.TopicPartitions.Should().BeNull();
    endpoint.TopicPartitionsResolver.Should().Be(resolver);
}
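// Illustrative sketch (not from the original sources) of a more selective partition resolver:
// the delegate receives the full list of available partitions and may return any subset,
// optionally with explicit starting offsets. The topic names and the "partition 0 only" filter
// are assumptions made up for the example; System.Linq is required for Where/Select.
Func<IReadOnlyCollection<TopicPartition>, IEnumerable<TopicPartitionOffset>> replayResolver =
    partitions => partitions
        .Where(partition => partition.Partition.Value == 0)
        .Select(partition => new TopicPartitionOffset(partition, Offset.Beginning));

var replayEndpoint = new KafkaConsumerEndpoint(new[] { "topic1", "topic2" }, replayResolver);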
public void Constructor_MultipleTopicPartitions_TopicsSet()
{
    var endpoint = new KafkaConsumerEndpoint(
        new TopicPartition("topic1", 0),
        new TopicPartition("topic1", 1),
        new TopicPartition("topic2", 2),
        new TopicPartition("topic2", 3));

    endpoint.Name.Should().Be("[topic1[0],topic1[1],topic2[2],topic2[3]]");
    endpoint.Names.Should().BeEquivalentTo("topic1[0]", "topic1[1]", "topic2[2]", "topic2[3]");
    endpoint.TopicPartitions.Should().BeEquivalentTo(
        new TopicPartitionOffset("topic1", 0, Offset.Unset),
        new TopicPartitionOffset("topic1", 1, Offset.Unset),
        new TopicPartitionOffset("topic2", 2, Offset.Unset),
        new TopicPartitionOffset("topic2", 3, Offset.Unset));
}
public void Constructor_TopicPartitionOffsets_TopicsSet()
{
    var endpoint = new KafkaConsumerEndpoint(
        new TopicPartitionOffset("topic1", 0, Offset.Beginning),
        new TopicPartitionOffset("topic1", 1, Offset.End),
        new TopicPartitionOffset("topic2", 2, 42),
        new TopicPartitionOffset("topic2", 3, Offset.Unset));

    endpoint.Name.Should().Be("[topic1,topic2]");
    endpoint.Names.Should().BeEquivalentTo("topic1", "topic2");
    endpoint.TopicPartitions.Should().BeEquivalentTo(
        new TopicPartitionOffset("topic1", 0, Offset.Beginning),
        new TopicPartitionOffset("topic1", 1, Offset.End),
        new TopicPartitionOffset("topic2", 2, 42),
        new TopicPartitionOffset("topic2", 3, Offset.Unset));
}
private static KafkaConsumerEndpoint CreateConsumerEndpoint(string[] names, IMessageSerializer messageSerializer)
{
    var endpoint = new KafkaConsumerEndpoint(names)
    {
        Configuration = new KafkaConsumerConfig
        {
            BootstrapServers = "PLAINTEXT://localhost:9092",
            GroupId = "silverback-examples"
        }
    };

    if (messageSerializer != null)
        endpoint.Serializer = messageSerializer;

    return endpoint;
}
private static KafkaConsumerEndpoint CreateConsumerEndpoint(string name, IMessageSerializer messageSerializer = null)
{
    var endpoint = new KafkaConsumerEndpoint(name)
    {
        Configuration = new KafkaConsumerConfig
        {
            BootstrapServers = "PLAINTEXT://kafka:9092",
            ClientId = "consumer-service-a",
            GroupId = "silverback-examples"
        }
    };

    if (messageSerializer != null)
        endpoint.Serializer = messageSerializer;

    return endpoint;
}
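// Hypothetical usage of the CreateConsumerEndpoint helpers above (not in the original sources):
// the resulting endpoint is resolved into a consumer through the broker, mirroring the
// GetConsumer_ReturnsNewInMemoryConsumer test. The "serviceProvider" variable and the topic
// name are assumptions made up for the example.
var consumerEndpoint = CreateConsumerEndpoint("sample-topic");
var sampleConsumer = serviceProvider.GetRequiredService<IBroker>().GetConsumer(consumerEndpoint);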
public void Equals_DifferentConfiguration_IsFalse()
{
    var endpoint1 = new KafkaConsumerEndpoint("endpoint")
    {
        Configuration = new KafkaConsumerConfig
        {
            AutoCommitIntervalMs = 1000
        }
    };
    var endpoint2 = new KafkaConsumerEndpoint("endpoint")
    {
        Configuration = new KafkaConsumerConfig
        {
            BrokerAddressTtl = 2000
        }
    };

    endpoint1.Equals(endpoint2).Should().BeFalse();
}
public void Equals_SameConfiguration_IsTrue()
{
    var endpoint1 = new KafkaConsumerEndpoint("endpoint")
    {
        Configuration = new KafkaConsumerConfig
        {
            AutoCommitIntervalMs = 1000
        }
    };
    var endpoint2 = new KafkaConsumerEndpoint("endpoint")
    {
        Configuration = new KafkaConsumerConfig
        {
            AutoCommitIntervalMs = 1000
        }
    };

    endpoint1.Equals(endpoint2).Should().BeTrue();
}
public void Equals_DifferentTopic_FalseIsReturned()
{
    var endpoint1 = new KafkaConsumerEndpoint("topic")
    {
        Configuration =
        {
            AutoCommitIntervalMs = 1000
        }
    };
    var endpoint2 = new KafkaConsumerEndpoint("topic2")
    {
        Configuration =
        {
            AutoCommitIntervalMs = 1000
        }
    };

    endpoint1.Equals(endpoint2).Should().BeFalse();
}
public void Subscribe(KafkaConsumerEndpoint endpoint)
{
    _endpoints.Add(endpoint);
}
public void Subscribe(KafkaConsumerEndpoint endpoint)
{
    _endpoints.Add(endpoint);

    _innerConsumer.Subscribe(_endpoints.Select(e => e.Name));
}
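// Illustration (inferred from the method body above, not taken from the original sources) of the
// cumulative behavior of this overload: each call re-issues Subscribe on the inner Confluent
// consumer with the names of every endpoint registered so far, e.g.
//   Subscribe(new KafkaConsumerEndpoint("topic1"));  // inner consumer subscribed to: topic1
//   Subscribe(new KafkaConsumerEndpoint("topic2"));  // inner consumer subscribed to: topic1, topic2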