public void Equals_DifferentSerializerSettings_FalseIsReturned()
{
    // Two endpoints on the same topic, differing only in serializer options.
    var first = new KafkaProducerEndpoint("topic")
    {
        Serializer = new JsonMessageSerializer
        {
            Options = { MaxDepth = 100 }
        }
    };
    var second = new KafkaProducerEndpoint("topic")
    {
        Serializer = new JsonMessageSerializer
        {
            Options = { MaxDepth = 8 }
        }
    };

    // The serializer settings difference must break equality.
    first.Equals(second).Should().BeFalse();
}
public void Produce_SomeMessage_EndpointConfigurationIsNotAltered()
{
    // One endpoint is actually used to produce; the other is an identical,
    // untouched reference copy to compare against afterwards.
    var usedEndpoint = new KafkaProducerEndpoint("test-endpoint")
    {
        Configuration =
        {
            BootstrapServers = "PLAINTEXT://whatever:1111",
            MessageTimeoutMs = 10
        }
    };
    var referenceEndpoint = new KafkaProducerEndpoint("test-endpoint")
    {
        Configuration =
        {
            BootstrapServers = "PLAINTEXT://whatever:1111",
            MessageTimeoutMs = 10
        }
    };

    try
    {
        _broker.GetProducer(usedEndpoint).Produce("test");
    }
    catch
    {
        // Deliberately swallowed: the produce outcome is irrelevant here,
        // only the endpoint's post-call state matters.
    }

    // Producing must not have mutated the endpoint configuration.
    usedEndpoint.Should().BeEquivalentTo(referenceEndpoint);
}
public void LogProduceError_NoEnvelope_Logged()
{
    var endpoint = new KafkaProducerEndpoint("[dynamic]");
    const string actualEndpointName = "test1";
    var headers = new MessageHeaderCollection
    {
        { DefaultMessageHeaders.MessageType, "Message.Type" },
        { DefaultMessageHeaders.MessageId, "1234" },
        { KafkaMessageHeaders.KafkaMessageKey, "key1234" }
    };

    // The offset renders as (null) since no broker identifier exists on error.
    var expectedMessage =
        "Error occurred producing the message. | " +
        "endpointName: test1, " +
        "messageType: Message.Type, " +
        "messageId: 1234, " +
        "offset: (null), " +
        "kafkaKey: key1234";

    _outboundLogger.LogProduceError(
        endpoint,
        actualEndpointName,
        headers,
        new InvalidDataException());

    // Event id 1032 = produce error, logged as a warning with the exception attached.
    _loggerSubstitute.Received(
        LogLevel.Warning,
        typeof(InvalidDataException),
        expectedMessage,
        1032);
}
public void Constructor_Partition_CorrectlyValidated(int value, bool isValid)
{
    KafkaProducerEndpoint? endpoint = null;

    // Wrap construction so both outcomes (success vs. throw) can be asserted.
    Action construct = () =>
    {
        endpoint = new KafkaProducerEndpoint("test", value)
        {
            Configuration = new KafkaProducerConfig
            {
                BootstrapServers = "test-server"
            }
        };
    };

    if (isValid)
    {
        construct.Should().NotThrow();
        endpoint.Should().NotBeNull();
    }
    else
    {
        construct.Should().ThrowExactly<ArgumentOutOfRangeException>();
    }
}
public void Equals_SameTopicAndPartition_TrueIsReturned()
{
    // Identical topic and partition index should yield equal endpoints.
    var first = new KafkaProducerEndpoint("topic", 1);
    var second = new KafkaProducerEndpoint("topic", 1);

    first.Equals(second).Should().BeTrue();
}
public void Equals_SameSerializerSettings_TrueIsReturned()
{
    // Matching serializer options must not break endpoint equality.
    var first = new KafkaProducerEndpoint("endpoint")
    {
        Serializer = new JsonMessageSerializer
        {
            Options = { MaxDepth = 100 }
        }
    };
    var second = new KafkaProducerEndpoint("endpoint")
    {
        Serializer = new JsonMessageSerializer
        {
            Options = { MaxDepth = 100 }
        }
    };

    first.Equals(second).Should().BeTrue();
}
public void ShouldBeTheSameAfterDeserialization()
{
    var endpoint1 = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.Leader
        }
    };

    // Round-trip through Json.NET with type information preserved.
    var json = JsonConvert.SerializeObject(
        endpoint1,
        new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto });
    var endpoint2 = JsonConvert.DeserializeObject<KafkaProducerEndpoint>(
        json,
        new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto });

    // Guard against a null deserialization result before dereferencing it.
    endpoint2.Should().NotBeNull();
    endpoint2!.Configuration.Should().BeEquivalentTo(endpoint1.Configuration);
    endpoint2.Chunk.Should().BeEquivalentTo(endpoint1.Chunk);

    // Use Be() for the name: BeEquivalentTo on strings compares
    // case-insensitively and would let a casing regression slip through.
    endpoint2.Name.Should().Be(endpoint1.Name);
    endpoint2.Should().BeEquivalentTo(endpoint1);
}
public void Equals_DifferentSerializerSettings_IsFalse()
{
    // Same endpoint name, but diverging serializer settings.
    var first = new KafkaProducerEndpoint("endpoint")
    {
        Serializer = new JsonMessageSerializer
        {
            Settings = { MaxDepth = 100 }
        }
    };
    var second = new KafkaProducerEndpoint("endpoint")
    {
        Serializer = new JsonMessageSerializer
        {
            Settings = { MaxDepth = 8 }
        }
    };

    first.Equals(second).Should().BeFalse();
}
public void Validate_InvalidConfiguration_ExceptionThrown()
{
    // No bootstrap servers configured: validation is expected to fail.
    var endpoint = new KafkaProducerEndpoint("topic");

    Action validating = () => endpoint.Validate();

    validating.Should().ThrowExactly<EndpointConfigurationException>();
}
public void GetProducer_ReturnsNewInMemoryProducer()
{
    var endpoint = new KafkaProducerEndpoint("test");
    var broker = _serviceProvider.GetRequiredService<IBroker>();

    var producer = broker.GetProducer(endpoint);

    // The mocked broker must hand back the in-memory producer implementation.
    producer.Should().NotBeNull();
    producer.Should().BeOfType<InMemoryProducer>();
}
public void Equals_SameEndpointInstance_IsTrue()
{
    var endpoint = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.Leader
        }
    };

    // Reflexivity: an endpoint always equals itself.
    endpoint.Equals(endpoint).Should().BeTrue();
}
public void Equals_SameEndpointInstance_TrueIsReturned()
{
    var endpoint = new KafkaProducerEndpoint("topic")
    {
        Configuration = { Acks = Acks.Leader }
    };

    // Reflexivity check: comparing the instance with itself.
    endpoint.Equals(endpoint).Should().BeTrue();
}
public void Validate_MissingTopic_ExceptionThrown()
{
    // Valid broker configuration, but an empty topic name.
    var endpoint = new KafkaProducerEndpoint(string.Empty)
    {
        Configuration = new KafkaProducerConfig
        {
            BootstrapServers = "test-server"
        }
    };

    Action validating = () => endpoint.Validate();

    validating.Should().ThrowExactly<EndpointConfigurationException>();
}
public void Equals_DifferentTopic_FalseIsReturned()
{
    // Identical configuration, but the target topics differ.
    var first = new KafkaProducerEndpoint("topic")
    {
        Configuration = { Acks = Acks.Leader }
    };
    var second = new KafkaProducerEndpoint("topic2")
    {
        Configuration = { Acks = Acks.Leader }
    };

    first.Equals(second).Should().BeFalse();
}
public void Equals_DifferentConfiguration_IsFalse()
{
    // Same endpoint name, but different Acks settings in the configuration.
    var first = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.Leader
        }
    };
    var second = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.All
        }
    };

    first.Equals(second).Should().BeFalse();
}
public void Equals_SameConfiguration_TrueIsReturned()
{
    // Equal topic and equal configuration must yield equal endpoints.
    var first = new KafkaProducerEndpoint("topic")
    {
        Configuration = { Acks = Acks.Leader }
    };
    var second = new KafkaProducerEndpoint("topic")
    {
        Configuration = { Acks = Acks.Leader }
    };

    first.Equals(second).Should().BeTrue();
}
public void Equals_SameConfiguration_IsTrue()
{
    // Two independently constructed endpoints with identical settings.
    var first = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.Leader
        }
    };
    var second = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.Leader
        }
    };

    first.Equals(second).Should().BeTrue();
}
public void Equals_DeserializedEndpointWithSameConfiguration_IsTrue()
{
    var original = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.Leader
        }
    };

    // Round-trip through Json.NET, preserving runtime type information.
    var settings = new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto };
    var json = JsonConvert.SerializeObject(original, settings);
    var roundTripped = JsonConvert.DeserializeObject<KafkaProducerEndpoint>(json, settings);

    // The deserialized copy must still compare equal to the original.
    original.Equals(roundTripped).Should().BeTrue();
}
public void ShouldBeSerializable()
{
    var original = new KafkaProducerEndpoint("endpoint")
    {
        Configuration = new KafkaProducerConfig
        {
            Acks = Confluent.Kafka.Acks.All
        }
    };

    // Serialize and deserialize with type information enabled.
    var settings = new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto };
    var json = JsonConvert.SerializeObject(original, settings);
    var roundTripped = JsonConvert.DeserializeObject<KafkaProducerEndpoint>(json, settings);

    // The configuration must survive the round trip.
    roundTripped.Should().NotBeNull();
    roundTripped.Configuration.Acks.Should().Be(original.Configuration.Acks);
}
public void LogProduced_NoEnvelope_Logged()
{
    var endpoint = new KafkaProducerEndpoint("[dynamic]");
    const string actualEndpointName = "test1";
    var headers = new MessageHeaderCollection
    {
        { DefaultMessageHeaders.MessageType, "Message.Type" },
        { DefaultMessageHeaders.MessageId, "1234" },
        { KafkaMessageHeaders.KafkaMessageKey, "key1234" }
    };
    var brokerMessageIdentifier = new KafkaOffset("topic2", 2, 42);

    // The offset renders as [partition]@offset in the log line.
    var expectedMessage =
        "Message produced. | " +
        "endpointName: test1, " +
        "messageType: Message.Type, " +
        "messageId: 1234, " +
        "offset: [2]@42, " +
        "kafkaKey: key1234";

    _outboundLogger.LogProduced(endpoint, actualEndpointName, headers, brokerMessageIdentifier);

    // Event id 1031 = message produced, logged as information without exception.
    _loggerSubstitute.Received(LogLevel.Information, null, expectedMessage, 1031);
}