/// <summary>
/// Sending a message whose payload exceeds the configured
/// <c>MaxMessageSize</c> must be rejected by the producer.
/// (The expected-exception assertion is presumed to be on a test attribute
/// outside this snippet — TODO confirm.)
/// </summary>
public void ShouldThrowMessageTooLarge()
{
    var connection = new Mock<IKafkaConnection>();
    var config = new SyncProducerConfiguration { MaxMessageSize = 99 };
    var producer = new SyncProducer(config, connection.Object);

    // 100-byte payload deliberately exceeds the 99-byte limit configured above.
    var oversizedSet = new BufferedMessageSet(new List<Message> { new Message(new byte[100]) }, 0);
    var partitions = new List<PartitionData> { new PartitionData(0, oversizedSet) };
    var topics = new List<TopicData> { new TopicData("test", partitions) };

    producer.Send(new ProducerRequest(1, "client", 0, 0, topics));
}
/// <summary>
/// Produces 500 identical messages one at a time, then verifies the consumer
/// connector delivers every one of them (and each with the expected size)
/// while offsets are auto-committed on a very short (10 ms) interval.
/// </summary>
public void SimpleSyncProducerSendsLotsOfMessagesAndConsumerConnectorGetsThemBackWithVerySmallAutoCommitInterval()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;
    consumerConfig.AutoCommit = true;
    consumerConfig.AutoCommitInterval = 10;

    const int TotalMessages = 500;
    int expectedMessageSize = 0;
    var sentMessages = new List<Message>();

    using (var producer = new SyncProducer(prodConfig))
    {
        for (int i = 0; i < TotalMessages; i++)
        {
            var msg = new Message(Encoding.UTF8.GetBytes("kafka 1."));
            sentMessages.Add(msg);
            if (i == 0)
            {
                // Every payload is identical, so the first message's size is representative.
                expectedMessageSize = msg.Size;
            }

            producer.Send(CurrentTestTopic, 0, new List<Message> { msg });
        }
    }

    // Give the broker a moment to persist everything before consuming.
    Thread.Sleep(2000);

    int receivedCount = 0;
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary<string, int> { { CurrentTestTopic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var streams = messages[CurrentTestTopic];
        try
        {
            foreach (var stream in streams)
            {
                foreach (var message in stream)
                {
                    Assert.AreEqual(expectedMessageSize, message.Size);
                    receivedCount++;
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // Expected once the stream runs dry; terminates the consume loop.
        }
    }

    Assert.AreEqual(TotalMessages, receivedCount);
}
/// <summary>
/// Initializes the exception, retaining the producer associated with the failure.
/// </summary>
/// <param name="syncProducer">The producer instance involved when the error occurred.</param>
public KafkaProducerException(SyncProducer syncProducer)
{
    this.innerProducer = syncProducer;
}
/// <summary>
/// Produces messages that are gzip-compressed at two nesting levels
/// (inner batches compressed, then the batch of compressed messages
/// compressed again) and verifies the consumer connector fully unwraps
/// both levels, yielding every leaf message.
/// </summary>
public void SimpleSyncProducerSendsLotsOfTwiceCompressedMessagesAndConsumerConnectorGetsThemBack()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;

    const int OuterPackages = 500;
    const int MessagesPerPackage = 5;
    const int MessagesPerInnerPackage = 5;

    using (var producer = new SyncProducer(prodConfig))
    {
        for (int i = 0; i < OuterPackages; i++)
        {
            var outerBatch = new List<Message>();
            for (int outer = 0; outer < MessagesPerPackage; outer++)
            {
                var innerBatch = new List<Message>();
                for (int inner = 0; inner < MessagesPerInnerPackage; inner++)
                {
                    innerBatch.Add(new Message(Encoding.UTF8.GetBytes("kafka 1.")));
                }

                // First compression level: gzip the inner batch into one message.
                outerBatch.Add(CompressionUtils.Compress(innerBatch, CompressionCodecs.GZIPCompressionCodec));
            }

            // Second compression level: gzip the already-compressed messages.
            var packageMessage = CompressionUtils.Compress(outerBatch, CompressionCodecs.GZIPCompressionCodec);
            producer.Send(CurrentTestTopic, 0, new List<Message> { packageMessage });
        }
    }

    // Give the broker a moment to persist everything before consuming.
    Thread.Sleep(2000);

    int receivedCount = 0;
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary<string, int> { { CurrentTestTopic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var streams = messages[CurrentTestTopic];
        try
        {
            foreach (var stream in streams)
            {
                foreach (var message in stream)
                {
                    receivedCount++;
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // Expected once the stream runs dry; terminates the consume loop.
        }
    }

    Assert.AreEqual(OuterPackages * MessagesPerPackage * MessagesPerInnerPackage, receivedCount);
}
/// <summary>
/// Produces 2000 gzip-compressed packages of 5 messages each, where payloads
/// grow with the message number, then verifies the consumer connector returns
/// every message in order with matching content and total byte size.
/// </summary>
public void SimpleSyncProducerSendsLotsOfCompressedMessagesWithIncreasedSizeAndConsumerConnectorGetsThemBack()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;
    consumerConfig.AutoCommit = true;
    consumerConfig.AutoCommitInterval = 100;

    const int PackageCount = 2000;
    const int MessagesPerPackage = 5;
    string topic = CurrentTestTopic;

    // Snapshot broker offsets before producing (return value intentionally unused here).
    var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
    multipleBrokersHelper.GetCurrentOffsets(new[] { prodConfig });

    int nextMessageNumber = 0;
    long totalBytesSent = 0;

    using (var producer = new SyncProducer(prodConfig))
    {
        for (int i = 0; i < PackageCount; i++)
        {
            var batch = new List<Message>();
            for (int j = 0; j < MessagesPerPackage; j++)
            {
                // Payload content (and hence size) varies with the message number.
                var msg = new Message(Encoding.UTF8.GetBytes(CreatePayloadByNumber(nextMessageNumber)));
                totalBytesSent += msg.Size;
                batch.Add(msg);
                nextMessageNumber++;
            }

            var packageMessage = CompressionUtils.Compress(batch, CompressionCodecs.GZIPCompressionCodec);
            producer.Send(topic, 0, new List<Message> { packageMessage });
        }
    }

    // now consuming
    int receivedCount = 0;
    long receivedBytes = 0;
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary<string, int> { { topic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var streams = messages[topic];
        try
        {
            foreach (var stream in streams)
            {
                foreach (var message in stream)
                {
                    // Messages must arrive in production order with intact payloads.
                    Assert.AreEqual(CreatePayloadByNumber(receivedCount), Encoding.UTF8.GetString(message.Payload));
                    receivedCount++;
                    receivedBytes += message.Size;
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // Expected once the stream runs dry; terminates the consume loop.
        }
    }

    Assert.AreEqual(PackageCount * MessagesPerPackage, receivedCount);
    Assert.AreEqual(totalBytesSent, receivedBytes);
}
/// <summary>
/// Produces two messages in a single request, peeks at offsets via a plain
/// <see cref="Consumer"/> fetch, then verifies the consumer connector returns
/// exactly those two messages with identical content.
/// </summary>
public void SimpleSyncProducerSends2MessagesAndConsumerConnectorGetsThemBack()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;
    var consConf = this.ConsumerConfig1;

    // first producing
    var msg1 = new Message(Encoding.UTF8.GetBytes("kafka 1."));
    var msg2 = new Message(Encoding.UTF8.GetBytes("kafka 2."));

    using (var producer = new SyncProducer(prodConfig))
    {
        producer.Send(new ProducerRequest(CurrentTestTopic, 0, new List<Message> { msg1, msg2 }));
    }

    // Low-level fetch to walk the offsets of what was just produced.
    // NOTE(review): offsets are summed (+=) rather than tracked to the last
    // item, and the result is never asserted — presumably only exercising the
    // fetch path; confirm intent before changing.
    var consumer = new Consumer(consConf);
    long offset = 0;
    var fetchResult = consumer.Fetch(new FetchRequest(CurrentTestTopic, 0, offset, 400));
    foreach (var resultItem in fetchResult)
    {
        offset += resultItem.Offset;
    }

    // now consuming
    var receivedMessages = new List<Message>();
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary<string, int> { { CurrentTestTopic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var streams = messages[CurrentTestTopic];
        try
        {
            foreach (var stream in streams)
            {
                foreach (var message in stream)
                {
                    receivedMessages.Add(message);
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // Expected once the stream runs dry; terminates the consume loop.
        }
    }

    Assert.AreEqual(2, receivedMessages.Count);
    Assert.AreEqual(msg1.ToString(), receivedMessages[0].ToString());
    Assert.AreEqual(msg2.ToString(), receivedMessages[1].ToString());
}
/// <summary>
/// Produces one message to each of two distinct topics and verifies a single
/// consumer connector subscribed to both topics routes each message to the
/// correct stream.
/// </summary>
public void ConsumerConnectorConsumesTwoDifferentTopics()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;

    string topic1 = CurrentTestTopic + "1";
    string topic2 = CurrentTestTopic + "2";

    // first producing
    var msg1 = new Message(Encoding.UTF8.GetBytes("kafka 1."));
    var msg2 = new Message(Encoding.UTF8.GetBytes("kafka 2."));

    using (var producer = new SyncProducer(prodConfig))
    {
        producer.Send(new ProducerRequest(topic1, 0, new List<Message> { msg1 }));
        producer.Send(new ProducerRequest(topic2, 0, new List<Message> { msg2 }));
    }

    // now consuming
    var receivedFromTopic1 = new List<Message>();
    var receivedFromTopic2 = new List<Message>();
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary<string, int> { { topic1, 1 }, { topic2, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        Assert.IsTrue(messages.ContainsKey(topic1));
        Assert.IsTrue(messages.ContainsKey(topic2));

        // Drain topic1's streams until the consumer times out.
        var streams1 = messages[topic1];
        try
        {
            foreach (var stream in streams1)
            {
                foreach (var message in stream)
                {
                    receivedFromTopic1.Add(message);
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // Expected once the stream runs dry; terminates the consume loop.
        }

        // Drain topic2's streams the same way.
        var streams2 = messages[topic2];
        try
        {
            foreach (var stream in streams2)
            {
                foreach (var message in stream)
                {
                    receivedFromTopic2.Add(message);
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // Expected once the stream runs dry; terminates the consume loop.
        }
    }

    Assert.AreEqual(1, receivedFromTopic1.Count);
    Assert.AreEqual(msg1.ToString(), receivedFromTopic1[0].ToString());
    Assert.AreEqual(1, receivedFromTopic2.Count);
    Assert.AreEqual(msg2.ToString(), receivedFromTopic2[0].ToString());
}
/// <summary>
/// Initializes the exception with the producer involved in the failure and
/// the options that governed its synchronous-exception behavior.
/// </summary>
/// <param name="syncProducer">The producer instance involved when the error occurred.</param>
/// <param name="options">Options describing how the sync failure should be handled.</param>
public KafkaProducerException(SyncProducer syncProducer, ProducerSyncExceptionOptions options)
    : this(syncProducer)
{
    this.options = options;
}
/// <summary>
/// Creates an output pipe backed by a dedicated synchronous consumer that is
/// immediately subscribed to <paramref name="topic"/>.
/// </summary>
/// <param name="topic">Topic this pipe reads from.</param>
/// <param name="consumeTimeout">How long a consume call may block.</param>
/// <param name="configuration">Stream configuration used to derive the consumer config.</param>
/// <param name="producer">Producer handed to the synchronous consumer.</param>
/// <param name="token">Cancellation token observed by the pipe.</param>
public SyncPipeOutput(string topic, TimeSpan consumeTimeout, IStreamConfig configuration, SyncProducer producer, CancellationToken token)
{
    this.token = token;
    topicName = topic;
    timeout = consumeTimeout;

    // One dedicated consumer per pipe, identified by application id and topic.
    var consumerConfig = configuration.ToConsumerConfig($"pipe-output-{configuration.ApplicationId}-{topicName}");
    consumer = new SyncConsumer(consumerConfig, producer);
    consumer.Subscribe(topicName);
}
/// <summary>
/// Regression test: with a small fetch size (256 bytes) that is still larger
/// than any single message, the consumer must deliver all 100 messages in
/// order without tripping the max-fetch-size bug.
/// </summary>
public void MaxFetchSizeBugShouldNotAppearWhenSmallFetchSizeAndSingleMessageSmallerThanFetchSize()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;
    consumerConfig.FetchSize = 256;
    consumerConfig.NumberOfTries = 1;
    consumerConfig.AutoCommitInterval = 1000;

    const int MessagesToSend = 100;
    string topic = CurrentTestTopic;
    var sentMessages = new List<Message>();

    using (var producer = new SyncProducer(prodConfig))
    {
        for (int i = 0; i < MessagesToSend; i++)
        {
            // Payload numbering starts at 100 so each payload is distinct and ordered.
            var msg = new Message(Encoding.UTF8.GetBytes(CreatePayloadByNumber(i + 100)));
            sentMessages.Add(msg);
            producer.Send(new ProducerRequest(topic, 0, new List<Message> { msg }));
        }
    }

    // Give the broker a moment to persist everything before consuming.
    Thread.Sleep(3000);

    // now consuming
    int nextExpectedNumber = 0;
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary<string, int> { { topic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var streams = messages[topic];
        try
        {
            foreach (var stream in streams)
            {
                foreach (var message in stream)
                {
                    // Payloads must come back in exactly the order they were produced.
                    Assert.AreEqual(CreatePayloadByNumber(nextExpectedNumber + 100), Encoding.UTF8.GetString(message.Payload));
                    nextExpectedNumber++;
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // Expected once the stream runs dry; terminates the consume loop.
        }
    }

    Assert.AreEqual(MessagesToSend, nextExpectedNumber);
}