Message sent to the Kafka server
Format:
1. 1 byte "magic" identifier to allow format changes
2. 4 byte CRC32 of the payload
3. N - 5 byte payload
Inheritance: IWritable
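As a minimal, self-contained sketch of that layout (an illustration only, not part of the client API), the helper below frames a payload as [1-byte magic][4-byte CRC32 of payload][payload]; it assumes the Crc32Hasher.Compute helper used in the tests below returns the 4-byte checksum.

        public static byte[] FrameMessage(byte[] payload)
        {
            // Wire layout: [magic (1 byte)][CRC32 of payload (4 bytes)][payload (N - 5 bytes)]
            using (var ms = new MemoryStream())
            {
                ms.WriteByte(0);                                // "magic" identifier (0 for this legacy layout)
                byte[] checksum = Crc32Hasher.Compute(payload); // 4-byte CRC32 of the payload
                ms.Write(checksum, 0, checksum.Length);
                ms.Write(payload, 0, payload.Length);           // remaining N - 5 bytes
                return ms.ToArray();
            }
        }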
Example #1
        public void ZKAwareProducerSends1Message()
        {
            int totalWaitTimeInMiliseconds = 0;
            int waitSingle = 100;
            var originalMessage = new Message(Encoding.UTF8.GetBytes("TestData"));

            var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
            multipleBrokersHelper.GetCurrentOffsets();

            var producerConfig = new ProducerConfig(clientConfig);
            var mockPartitioner = new MockAlwaysZeroPartitioner();
            using (var producer = new Producer<string, Message>(producerConfig, mockPartitioner, new DefaultEncoder()))
            {
                var producerData = new ProducerData<string, Message>(
                    CurrentTestTopic, "somekey", new List<Message>() { originalMessage });
                producer.Send(producerData);

                while (!multipleBrokersHelper.CheckIfAnyBrokerHasChanged())
                {
                    totalWaitTimeInMiliseconds += waitSingle;
                    Thread.Sleep(waitSingle);
                    if (totalWaitTimeInMiliseconds > MaxTestWaitTimeInMiliseconds)
                    {
                        Assert.Fail("None of the brokers changed their offset after sending a message");
                    }
                }

                totalWaitTimeInMiliseconds = 0;

                var consumerConfig = new ConsumerConfig(clientConfig)
                    {
                        Host = multipleBrokersHelper.BrokerThatHasChanged.Address,
                        Port = multipleBrokersHelper.BrokerThatHasChanged.Port
                    };
                IConsumer consumer = new Consumers.Consumer(consumerConfig);
                var request = new FetchRequest(CurrentTestTopic, 0, multipleBrokersHelper.OffsetFromBeforeTheChange);

                BufferedMessageSet response;

                while (true)
                {
                    Thread.Sleep(waitSingle);
                    response = consumer.Fetch(request);
                    if (response != null && response.Messages.Count() > 0)
                    {
                        break;
                    }

                    totalWaitTimeInMiliseconds += waitSingle;
                    if (totalWaitTimeInMiliseconds >= MaxTestWaitTimeInMiliseconds)
                    {
                        break;
                    }
                }

                Assert.NotNull(response);
                Assert.AreEqual(1, response.Messages.Count());
                Assert.AreEqual(originalMessage.ToString(), response.Messages.First().ToString());
            }
        }
Example #2
        public void ProducerSends1Message()
        {
            var prodConfig = this.ConfigBasedSyncProdConfig;

            int totalWaitTimeInMiliseconds = 0;
            int waitSingle = 100;
            var originalMessage = new Message(Encoding.UTF8.GetBytes("TestData"));

            var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
            multipleBrokersHelper.GetCurrentOffsets(
                new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 });
            using (var producer = new Producer(prodConfig))
            {
                var producerData = new ProducerData<string, Message>(
                    CurrentTestTopic, new List<Message> { originalMessage });
                producer.Send(producerData);
                Thread.Sleep(waitSingle);
            }

            while (
                !multipleBrokersHelper.CheckIfAnyBrokerHasChanged(
                    new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 }))
            {
                totalWaitTimeInMiliseconds += waitSingle;
                Thread.Sleep(waitSingle);
                if (totalWaitTimeInMiliseconds > this.maxTestWaitTimeInMiliseconds)
                {
                    Assert.Fail("None of the brokers changed their offset after sending a message");
                }
            }

            totalWaitTimeInMiliseconds = 0;

            var consumerConfig = new ConsumerConfiguration(
                multipleBrokersHelper.BrokerThatHasChanged.Host, multipleBrokersHelper.BrokerThatHasChanged.Port);
            IConsumer consumer = new Consumer(consumerConfig);
            var request1 = new FetchRequest(CurrentTestTopic, multipleBrokersHelper.PartitionThatHasChanged, multipleBrokersHelper.OffsetFromBeforeTheChange);
            BufferedMessageSet response;
            while (true)
            {
                Thread.Sleep(waitSingle);
                response = consumer.Fetch(request1);
                if (response != null && response.Messages.Count() > 0)
                {
                    break;
                }

                totalWaitTimeInMiliseconds += waitSingle;
                if (totalWaitTimeInMiliseconds >= this.maxTestWaitTimeInMiliseconds)
                {
                    break;
                }
            }

            Assert.NotNull(response);
            Assert.AreEqual(1, response.Messages.Count());
            Assert.AreEqual(originalMessage.ToString(), response.Messages.First().ToString());
        }
Example #3
        internal void AddMessage(KafkaMessages.Message message)
        {
            var slidingDoor = SlidingDoors.GetOrAdd(message.PartitionId.Value, partition =>
            {
                return(new SlidingDoor(CommitOffset,
                                       partition,
                                       Configuration.Instance.GetCommitPerMessage()));
            });

            slidingDoor.AddOffset(message.Offset);
        }
Example #4
 public static ConsumedMessage AsConsumedMessage(this KafkaMessage message)
 {
     return(new ConsumedMessage
     {
         Partition = message.PartitionId ?? 0,
         Offset = message.Offset,
         Key = message.Key.Decode(),
         Value = message.Payload.Decode(),
         Codec = (Compression)message.CompressionCodec
     });
 }
Example #5
 public void TestChecksum()
 {
     foreach (var v in this.messages)
     {
         Assert.False(v.Message.Equals(null));
         Assert.False(v.Message.Equals("asdf"));
         Assert.True(v.Message.Equals(v.Message));
         var copy = new Message(v.Payload, v.Key, v.Codec);
         Assert.True(v.Message.Equals(copy));
     }
 }
Example #6
        public void BufferedMessageSetWriteToValidSequence()
        {
            byte[] messageBytes = new byte[] { 1, 2, 3, 4, 5 };
            Message msg1 = new Message(messageBytes);
            Message msg2 = new Message(messageBytes);
            MessageSet messageSet = new BufferedMessageSet(new List<Message>() { msg1, msg2 });
            MemoryStream ms = new MemoryStream();
            messageSet.WriteTo(ms);

            ////first message

            byte[] messageLength = new byte[MessageLengthPartLength];
            Array.Copy(ms.ToArray(), MessageLengthPartOffset, messageLength, 0, MessageLengthPartLength);
            if (BitConverter.IsLittleEndian)
            {
                Array.Reverse(messageLength);
            }

            Assert.AreEqual(MagicNumberPartLength + AttributesPartLength + ChecksumPartLength + messageBytes.Length, BitConverter.ToInt32(messageLength, 0));

            Assert.AreEqual(1, ms.ToArray()[MagicNumberPartOffset]);    // default magic number should be 1

            byte[] checksumPart = new byte[ChecksumPartLength];
            Array.Copy(ms.ToArray(), ChecksumPartOffset, checksumPart, 0, ChecksumPartLength);
            Assert.AreEqual(Crc32Hasher.Compute(messageBytes), checksumPart);

            byte[] dataPart = new byte[messageBytes.Length];
            Array.Copy(ms.ToArray(), DataPartOffset, dataPart, 0, messageBytes.Length);
            Assert.AreEqual(messageBytes, dataPart);

            ////second message
            int secondMessageOffset = MessageLengthPartLength + MagicNumberPartLength + AttributesPartLength + ChecksumPartLength +
                                      messageBytes.Length;

            messageLength = new byte[MessageLengthPartLength];
            Array.Copy(ms.ToArray(), secondMessageOffset + MessageLengthPartOffset, messageLength, 0, MessageLengthPartLength);
            if (BitConverter.IsLittleEndian)
            {
                Array.Reverse(messageLength);
            }

            Assert.AreEqual(MagicNumberPartLength + AttributesPartLength + ChecksumPartLength + messageBytes.Length, BitConverter.ToInt32(messageLength, 0));

            Assert.AreEqual(1, ms.ToArray()[secondMessageOffset + MagicNumberPartOffset]);    // default magic number should be 1

            checksumPart = new byte[ChecksumPartLength];
            Array.Copy(ms.ToArray(), secondMessageOffset + ChecksumPartOffset, checksumPart, 0, ChecksumPartLength);
            Assert.AreEqual(Crc32Hasher.Compute(messageBytes), checksumPart);

            dataPart = new byte[messageBytes.Length];
            Array.Copy(ms.ToArray(), secondMessageOffset + DataPartOffset, dataPart, 0, messageBytes.Length);
            Assert.AreEqual(messageBytes, dataPart);
        }
Example #7
 public void ShouldAbleToEnumerateMessages()
 {
     var msg1 = new Message(new byte[101]) {Offset = 0};
     var msg2 = new Message(new byte[102]) {Offset = 1};
     var set = new BufferedMessageSet(new List<Message>() {msg1, msg2}, 0);
     set.MoveNext().Should().BeTrue();
     set.Current.Message.Payload.Length.Should().Be(101);
     set.Current.Message.Offset.Should().Be(0);
     set.MoveNext().Should().BeTrue();
     set.Current.Message.Payload.Length.Should().Be(102);
     set.Current.Message.Offset.Should().Be(1);
     set.MoveNext().Should().BeFalse();
 }
Example #8
        private static void Initializer(ProducePerfTestHelperOption producewrapperOption)
        {
            Logger.InfoFormat("prepare perf test, {0} ", DateTime.Now);
            listOfDataNeedSendInOneBatch = new List<ProducerData<byte[], Message>>();
            for (int i = 0; i < producewrapperOption.MessageCountPerBatch; i++)
            {
                String vKey = KafkaClientHelperUtils.GetRandomString(32);
                byte[] bKey = System.Text.Encoding.UTF8.GetBytes(vKey);

                String val = KafkaClientHelperUtils.GetRandomString(producewrapperOption.MessageSize);
                byte[] bVal = System.Text.Encoding.UTF8.GetBytes(val);

                Message message = new Message(bVal, bKey, producewrapperOption.CompressionCodec);
                listOfDataNeedSendInOneBatch.Add(new ProducerData<byte[], Message>(producewrapperOption.Topic, message));
            }
        }
Example #9
 public void ShouldAbleToWriteMessageSetWithExtraBytes()
 {
     var stream = new MemoryStream();
     var writer = new KafkaBinaryWriter(stream);
     var msg1 = new Message(new byte[101]) {Offset = 0};
     var msg2 = new Message(new byte[102]) {Offset = 1};
     var set = new BufferedMessageSet(new List<Message>() {msg1, msg2}, 0);
     set.WriteTo(writer);
     writer.Write(new byte[10]); // less than offset and size
     var size = (int) stream.Position;
     stream.Seek(0, SeekOrigin.Begin);
     var reader = new KafkaBinaryReader(stream);
     var newSet = BufferedMessageSet.ParseFrom(reader, size, 0);
     var messages = newSet.Messages.ToList();
     messages.Count().Should().Be(2);
     messages[0].Payload.Count().Should().Be(101);
     messages[1].Payload.Count().Should().Be(102);
 }
Example #10
        public void GetBytesValidSequence()
        {
            Message message = new Message(new byte[10], (byte)245);

            MemoryStream ms = new MemoryStream();
            message.WriteTo(ms);

            // len(payload) + 1 + 4
            Assert.AreEqual(15, ms.Length);

            // first byte = the magic number
            Assert.AreEqual((byte)245, ms.ToArray()[0]);

            // next 4 bytes = the checksum
            Assert.IsTrue(message.Checksum.SequenceEqual(ms.ToArray().Skip(1).Take(4).ToArray<byte>()));

            // remaining bytes = the payload
            Assert.AreEqual(10, ms.ToArray().Skip(5).ToArray<byte>().Length);
        }
Example #11
        public void AsyncProducerSendsAndConsumerReceivesSingleSimpleMessage()
        {
            Message sourceMessage = new Message(Encoding.UTF8.GetBytes("test message"));

            var config = new AsyncProducerConfig(clientConfig);
            var producer = new AsyncProducer(config);
            var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List<Message>() { sourceMessage });

            long currentOffset = TestHelper.GetCurrentKafkaOffset(CurrentTestTopic, clientConfig);

            producer.Send(producerRequest);

            ConsumerConfig consumerConfig = new ConsumerConfig(clientConfig);
            IConsumer consumer = new Consumers.Consumer(consumerConfig);
            FetchRequest request = new FetchRequest(CurrentTestTopic, 0, currentOffset);

            BufferedMessageSet response;
            int totalWaitTimeInMiliseconds = 0;
            int waitSingle = 100;
            while (true)
            {
                Thread.Sleep(waitSingle);
                response = consumer.Fetch(request);
                if (response != null && response.Messages.Count() > 0)
                {
                    break;
                }
                else
                {
                    totalWaitTimeInMiliseconds += waitSingle;
                    if (totalWaitTimeInMiliseconds >= MaxTestWaitTimeInMiliseconds)
                    {
                        break;
                    }
                }
            }

            Assert.NotNull(response);
            Assert.AreEqual(1, response.Messages.Count());
            Message resultMessage = response.Messages.First();
            Assert.AreEqual(sourceMessage.ToString(), resultMessage.ToString());
        }
Example #12
        public void GetBytesValidSequence()
        {
            Message message = new Message(new byte[10], CompressionCodecs.NoCompressionCodec);

            MemoryStream ms = new MemoryStream();
            message.WriteTo(ms);

            // len(payload) + 1 (magic) + 1 (attributes) + 4 (checksum)
            Assert.AreEqual(16, ms.Length);

            // first byte = the magic number
            Assert.AreEqual((byte)1, ms.ToArray()[0]);

            // second byte = the attributes
            Assert.AreEqual((byte)0, ms.ToArray()[1]);

            // next 4 bytes = the checksum
            Assert.IsTrue(message.Checksum.SequenceEqual(ms.ToArray().Skip(2).Take(4).ToArray<byte>()));

            // remaining bytes = the payload
            Assert.AreEqual(10, ms.ToArray().Skip(6).ToArray<byte>().Length);
        }
Example #13
        public void then_properties_should_be_set()
        {
            var message = new KafkaMessage(
                "value1".Encode(),
                "key1".Encode(),
                CompressionCodecs.GZIPCompressionCodec
                )
            {
                PartitionId = 1,
                Offset      = 1234
            };

            var actual   = message.AsConsumedMessage();
            var expected = new ConsumedMessage
            {
                Codec     = Compression.GZip,
                Key       = "key1",
                Value     = "value1",
                Partition = 1,
                Offset    = 1234
            };

            actual.ShouldBeEquivalentTo(expected);
        }
Example #14
        public void WriteToValidSequenceForDefaultConstructor()
        {
            byte[] messageBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 };
            Message message = new Message(messageBytes);
            MemoryStream ms = new MemoryStream();
            message.WriteTo(ms);

            Assert.AreEqual(0, ms.ToArray()[MagicNumberPartOffset]);    // default magic number should be 0

            byte[] checksumPart = new byte[ChecksumPartLength];
            Array.Copy(ms.ToArray(), ChecksumPartOffset, checksumPart, 0, ChecksumPartLength);
            Assert.AreEqual(Crc32Hasher.Compute(messageBytes), checksumPart);

            byte[] dataPart = new byte[messageBytes.Length];
            Array.Copy(ms.ToArray(), DataPartOffset, dataPart, 0, messageBytes.Length);
            Assert.AreEqual(messageBytes, dataPart);
        }
Example #15
 protected bool Equals(Message other)
 {
     return this.buffer.Equals(other.Buffer);
 }
Example #16
        public void SimpleSyncProducerSends2MessagesAndConsumerConnectorGetsThemBack()
        {
            // first producing
            string payload1 = "kafka 1.";
            byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
            var msg1 = new Message(payloadData1);

            string payload2 = "kafka 2.";
            byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
            var msg2 = new Message(payloadData2);

            var producerConfig = new SyncProducerConfig(clientConfig);
            var producer = new SyncProducer(producerConfig);
            var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List<Message> { msg1, msg2 });
            producer.Send(producerRequest);

            // now consuming
            var config = new ConsumerConfig(clientConfig) { AutoCommit = false };
            var resultMessages = new List<Message>();
            using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(config, true))
            {
                var topicCount = new Dictionary<string, int> { { CurrentTestTopic, 1 } };
                var messages = consumerConnector.CreateMessageStreams(topicCount);
                var sets = messages[CurrentTestTopic];
                try
                {
                    foreach (var set in sets)
                    {
                        foreach (var message in set)
                        {
                            resultMessages.Add(message);
                        }
                    }
                }
                catch (ConsumerTimeoutException)
                {
                    // do nothing, this is expected
                }
            }

            Assert.AreEqual(2, resultMessages.Count);
            Assert.AreEqual(msg1.ToString(), resultMessages[0].ToString());
            Assert.AreEqual(msg2.ToString(), resultMessages[1].ToString());
        }
Example #17
        public void OneMessageIsSentAndReceivedThenExceptionsWhenNoMessageThenAnotherMessageIsSentAndReceived()
        {
            // first producing
            string payload1 = "kafka 1.";
            byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
            var msg1 = new Message(payloadData1);

            var producerConfig = new SyncProducerConfig(clientConfig);
            var producer = new SyncProducer(producerConfig);
            var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List<Message> { msg1 });
            producer.Send(producerRequest);

            // now consuming
            var config = new ConsumerConfig(clientConfig) { AutoCommit = false, Timeout = 5000 };
            using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(config, true))
            {
                var topicCount = new Dictionary<string, int> { { CurrentTestTopic, 1 } };
                var messages = consumerConnector.CreateMessageStreams(topicCount);
                var sets = messages[CurrentTestTopic];
                KafkaMessageStream myStream = sets[0];
                var enumerator = myStream.GetEnumerator();

                Assert.IsTrue(enumerator.MoveNext());
                Assert.AreEqual(msg1.ToString(), enumerator.Current.ToString());

                Assert.Throws<ConsumerTimeoutException>(() => enumerator.MoveNext());

                Assert.Throws<Exception>(() => enumerator.MoveNext()); // iterator is in failed state

                enumerator.Reset();

                // producing again
                string payload2 = "kafka 2.";
                byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
                var msg2 = new Message(payloadData2);

                var producerRequest2 = new ProducerRequest(CurrentTestTopic, 0, new List<Message> { msg2 });
                producer.Send(producerRequest2);

                Thread.Sleep(3000);

                Assert.IsTrue(enumerator.MoveNext());
                Assert.AreEqual(msg2.ToString(), enumerator.Current.ToString());
            }
        }
Example #18
 public MessageTestVal(byte[] key, byte[] payload, CompressionCodecs codec, Message message)
 {
     this.Key = key;
     this.Payload = payload;
     this.Codec = codec;
     this.Message = message;
 }
Example #19
        public void ConsumerConnectorConsumesTwoDifferentTopics()
        {
            string topic1 = CurrentTestTopic + "1";
            string topic2 = CurrentTestTopic + "2";

            // first producing
            string payload1 = "kafka 1.";
            byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
            var msg1 = new Message(payloadData1);

            string payload2 = "kafka 2.";
            byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
            var msg2 = new Message(payloadData2);

            var producerConfig = new SyncProducerConfig(clientConfig);
            var producer = new SyncProducer(producerConfig);
            var producerRequest1 = new ProducerRequest(topic1, 0, new List<Message> { msg1 });
            producer.Send(producerRequest1);
            var producerRequest2 = new ProducerRequest(topic2, 0, new List<Message> { msg2 });
            producer.Send(producerRequest2);

            // now consuming
            var config = new ConsumerConfig(clientConfig) { AutoCommit = false };
            var resultMessages1 = new List<Message>();
            var resultMessages2 = new List<Message>();
            using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(config, true))
            {
                var topicCount = new Dictionary<string, int> { { topic1, 1 }, { topic2, 1 } };
                var messages = consumerConnector.CreateMessageStreams(topicCount);

                Assert.IsTrue(messages.ContainsKey(topic1));
                Assert.IsTrue(messages.ContainsKey(topic2));

                var sets1 = messages[topic1];
                try
                {
                    foreach (var set in sets1)
                    {
                        foreach (var message in set)
                        {
                            resultMessages1.Add(message);
                        }
                    }
                }
                catch (ConsumerTimeoutException)
                {
                    // do nothing, this is expected
                }

                var sets2 = messages[topic2];
                try
                {
                    foreach (var set in sets2)
                    {
                        foreach (var message in set)
                        {
                            resultMessages2.Add(message);
                        }
                    }
                }
                catch (ConsumerTimeoutException)
                {
                    // do nothing, this is expected
                }
            }

            Assert.AreEqual(1, resultMessages1.Count);
            Assert.AreEqual(msg1.ToString(), resultMessages1[0].ToString());

            Assert.AreEqual(1, resultMessages2.Count);
            Assert.AreEqual(msg2.ToString(), resultMessages2[0].ToString());
        }
Example #20
        public void ProducerSendsMessage()
        {
            string payload1 = "kafka 1.";
            byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
            Message msg1 = new Message(payloadData1);

            string payload2 = "kafka 2.";
            byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
            Message msg2 = new Message(payloadData2);

            var config = new SyncProducerConfig(clientConfig);
            var producer = new SyncProducer(config);
            var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List<Message>() { msg1, msg2 });
            producer.Send(producerRequest);
        }
Example #21
        public void TestMessageSizeTooLarge()
        {
            var props = TestUtils.GetSyncProducerConfig(this.Configs[0].Port);

            var producer = new SyncProducer(props);
            AdminUtils.CreateTopic(this.ZkClient, "test", 1, 1, new Dictionary<string, string>());
            TestUtils.WaitUntilLeaderIsElectedOrChanged(this.ZkClient, "test", 0, 500);
            TestUtils.WaitUntilMetadataIsPropagated(Servers, "test", 0, 2000);

            var message1 = new Message(new byte[Configs[0].MessageMaxBytes + 1]);
            var messageSet1 = new ByteBufferMessageSet(CompressionCodecs.NoCompressionCodec, new List<Message> { message1 });
            var response1 = producer.Send(TestUtils.ProduceRequest("test", 0, messageSet1, acks: 1));

            Assert.Equal(1, response1.Status.Count(kvp => kvp.Value.Error != ErrorMapping.NoError));
            Assert.Equal(ErrorMapping.MessageSizeTooLargeCode, response1.Status[new TopicAndPartition("test", 0)].Error);
            Assert.Equal(-1L, response1.Status[new TopicAndPartition("test", 0)].Offset);

            var safeSize = Configs[0].MessageMaxBytes - Message.MessageOverhead - MessageSet.LogOverhead - 1;
            var message2 = new Message(new byte[safeSize]);
            var messageSet2 = new ByteBufferMessageSet(
                CompressionCodecs.NoCompressionCodec, new List<Message> { message2 });
            var response2 = producer.Send(TestUtils.ProduceRequest("test", 0, messageSet2, acks: 1));

            Assert.Equal(0, response2.Status.Count(kvp => kvp.Value.Error != ErrorMapping.NoError));
            Assert.Equal(ErrorMapping.NoError, response2.Status[new TopicAndPartition("test", 0)].Error);
            Assert.Equal(0, response2.Status[new TopicAndPartition("test", 0)].Offset);
        }
Example #22
        public void ProducerSendsMessageWithLongTopic()
        {
            var prodConfig = this.SyncProducerConfig1;

            var msg = new Message(Encoding.UTF8.GetBytes("test message"));
            string topic = "ThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopic";
            using (var producer = new SyncProducer(prodConfig))
            {
                var producerRequest = new ProducerRequest(topic, 0, new List<Message> { msg });
                producer.Send(producerRequest);
            }
        }
Example #23
        public void ProducerSendsAndConsumerReceivesMultiRequest()
        {
            string testTopic1 = CurrentTestTopic + "1";
            string testTopic2 = CurrentTestTopic + "2";
            string testTopic3 = CurrentTestTopic + "3";

            Message sourceMessage1 = new Message(Encoding.UTF8.GetBytes("1: TestMessage"));
            Message sourceMessage2 = new Message(Encoding.UTF8.GetBytes("2: TestMessage"));
            Message sourceMessage3 = new Message(Encoding.UTF8.GetBytes("3: TestMessage"));
            Message sourceMessage4 = new Message(Encoding.UTF8.GetBytes("4: TestMessage"));

            List<ProducerRequest> requests = new List<ProducerRequest>
            {
                new ProducerRequest(testTopic1, 0, new List<Message> { sourceMessage1 }),
                new ProducerRequest(testTopic1, 0, new List<Message> { sourceMessage2 }),
                new ProducerRequest(testTopic2, 0, new List<Message> { sourceMessage3 }),
                new ProducerRequest(testTopic3, 0, new List<Message> { sourceMessage4 })
            };

            var config = new SyncProducerConfig(clientConfig);
            var producer = new SyncProducer(config);

            long currentOffset1 = TestHelper.GetCurrentKafkaOffset(testTopic1, clientConfig);
            long currentOffset2 = TestHelper.GetCurrentKafkaOffset(testTopic2, clientConfig);
            long currentOffset3 = TestHelper.GetCurrentKafkaOffset(testTopic3, clientConfig);

            producer.MultiSend(requests);

            ConsumerConfig consumerConfig = new ConsumerConfig(clientConfig);
            IConsumer consumer = new Consumers.Consumer(consumerConfig);
            MultiFetchRequest request = new MultiFetchRequest(new List<FetchRequest>
            {
                new FetchRequest(testTopic1, 0, currentOffset1),
                new FetchRequest(testTopic2, 0, currentOffset2),
                new FetchRequest(testTopic3, 0, currentOffset3)
            });
            IList<BufferedMessageSet> messageSets;
            int totalWaitTimeInMiliseconds = 0;
            int waitSingle = 100;
            while (true)
            {
                Thread.Sleep(waitSingle);
                messageSets = consumer.MultiFetch(request);
                if (messageSets.Count > 2 && messageSets[0].Messages.Count() > 0 && messageSets[1].Messages.Count() > 0 && messageSets[2].Messages.Count() > 0)
                {
                    break;
                }
                else
                {
                    totalWaitTimeInMiliseconds += waitSingle;
                    if (totalWaitTimeInMiliseconds >= MaxTestWaitTimeInMiliseconds)
                    {
                        break;
                    }
                }
            }

            Assert.AreEqual(3, messageSets.Count);
            Assert.AreEqual(2, messageSets[0].Messages.Count());
            Assert.AreEqual(1, messageSets[1].Messages.Count());
            Assert.AreEqual(1, messageSets[2].Messages.Count());
            Assert.AreEqual(sourceMessage1.ToString(), messageSets[0].Messages.First().ToString());
            Assert.AreEqual(sourceMessage2.ToString(), messageSets[0].Messages.Skip(1).First().ToString());
            Assert.AreEqual(sourceMessage3.ToString(), messageSets[1].Messages.First().ToString());
            Assert.AreEqual(sourceMessage4.ToString(), messageSets[2].Messages.First().ToString());
        }
Example #24
        public void ProducerSendsMessage()
        {
            var prodConfig = this.SyncProducerConfig1;

            string payload1 = "kafka 1.";
            byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
            var msg1 = new Message(payloadData1);

            string payload2 = "kafka 2.";
            byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
            var msg2 = new Message(payloadData2);

            using (var producer = new SyncProducer(prodConfig))
            {
                var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List<Message> { msg1, msg2 });
                producer.Send(producerRequest);
            }
        }
Example #25
        public void ZkAwareProducerSends3Messages()
        {
            var prodConfig = this.ZooKeeperBasedSyncProdConfig;
            int totalWaitTimeInMiliseconds = 0;
            int waitSingle = 100;
            var originalMessage1 = new Message(Encoding.UTF8.GetBytes("TestData1"));
            var originalMessage2 = new Message(Encoding.UTF8.GetBytes("TestData2"));
            var originalMessage3 = new Message(Encoding.UTF8.GetBytes("TestData3"));
            var originalMessageList = new List<Message> { originalMessage1, originalMessage2, originalMessage3 };

            var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
            multipleBrokersHelper.GetCurrentOffsets(new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 });

            var mockPartitioner = new MockAlwaysZeroPartitioner();
            using (var producer = new Producer<string, Message>(prodConfig, mockPartitioner, new DefaultEncoder()))
            {
                var producerData = new ProducerData<string, Message>(CurrentTestTopic, "somekey", originalMessageList);
                producer.Send(producerData);

                while (!multipleBrokersHelper.CheckIfAnyBrokerHasChanged(new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 }))
                {
                    totalWaitTimeInMiliseconds += waitSingle;
                    Thread.Sleep(waitSingle);
                    if (totalWaitTimeInMiliseconds > this.maxTestWaitTimeInMiliseconds)
                    {
                        Assert.Fail("None of the brokers changed their offset after sending a message");
                    }
                }

                totalWaitTimeInMiliseconds = 0;

                var consumerConfig = new ConsumerConfiguration(
                    multipleBrokersHelper.BrokerThatHasChanged.Host,
                    multipleBrokersHelper.BrokerThatHasChanged.Port);
                IConsumer consumer = new Consumer(consumerConfig);
                var request = new FetchRequest(CurrentTestTopic, 0, multipleBrokersHelper.OffsetFromBeforeTheChange);
                BufferedMessageSet response;

                while (true)
                {
                    Thread.Sleep(waitSingle);
                    response = consumer.Fetch(request);
                    if (response != null && response.Messages.Count() > 2)
                    {
                        break;
                    }

                    totalWaitTimeInMiliseconds += waitSingle;
                    if (totalWaitTimeInMiliseconds >= this.maxTestWaitTimeInMiliseconds)
                    {
                        break;
                    }
                }

                Assert.NotNull(response);
                Assert.AreEqual(3, response.Messages.Count());
                Assert.AreEqual(originalMessage1.ToString(), response.Messages.First().ToString());
                Assert.AreEqual(originalMessage2.ToString(), response.Messages.Skip(1).First().ToString());
                Assert.AreEqual(originalMessage3.ToString(), response.Messages.Skip(2).First().ToString());
            }
        }
Example #26
        internal static Message ParseFrom(KafkaBinaryReader reader, int size)
        {
            Message result;
            int readed = 0;
            byte magic = reader.ReadByte();
            readed++;
            byte[] checksum;
            byte[] payload;
            if (magic == 1)
            {
                byte attributes = reader.ReadByte();
                readed++;
                checksum = reader.ReadBytes(4);
                readed += 4;
                payload = reader.ReadBytes(size - (DefaultHeaderSize + 1));
                readed += size - (DefaultHeaderSize + 1);
                result = new Message(payload, checksum, Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask));
            }
            else
            {
                checksum = reader.ReadBytes(4);
                readed += 4;
                payload = reader.ReadBytes(size - DefaultHeaderSize);
                readed += size - DefaultHeaderSize;
                result = new Message(payload, checksum);
            }

            if (size != readed)
            {
                throw new KafkaException(KafkaException.InvalidRetchSizeCode);
            }

            return result;
        }
Example #27
        public void WriteToValidSequenceForCustomConstructor()
        {
            byte[] messageBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 };
            byte[] customChecksum = new byte[] { 3, 4, 5, 6 };
            Message message = new Message(messageBytes, (byte)33, customChecksum);
            MemoryStream ms = new MemoryStream();
            message.WriteTo(ms);

            Assert.AreEqual((byte)33, ms.ToArray()[MagicNumberPartOffset]);

            byte[] checksumPart = new byte[ChecksumPartLength];
            Array.Copy(ms.ToArray(), ChecksumPartOffset, checksumPart, 0, ChecksumPartLength);
            Assert.AreEqual(customChecksum, checksumPart);

            byte[] dataPart = new byte[messageBytes.Length];
            Array.Copy(ms.ToArray(), DataPartOffset, dataPart, 0, messageBytes.Length);
            Assert.AreEqual(messageBytes, dataPart);
        }
Example #28
 public void ProducerSendsMessageWithLongTopic()
 {
     Message msg = new Message(Encoding.UTF8.GetBytes("test message"));
     string topic = "ThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopicThisIsAVeryLongTopic";
     var config = new SyncProducerConfig(clientConfig);
     var producer = new SyncProducer(config);
     var producerRequest = new ProducerRequest(topic, 0, new List<Message>() { msg });
     producer.Send(producerRequest);
 }
Example #29
        /**
        * A message. The format of an N byte message is the following:
        *
        * 1. 4 byte CRC32 of the message
        * 2. 1 byte "magic" identifier to allow format changes, value is 2 currently
        * 3. 1 byte "attributes" identifier to allow annotations on the message independent of the version (e.g. compression enabled, type of codec used)
        * 4. 4 byte key length, containing length K
        * 5. K byte key
        * 6. 4 byte payload length, containing length V
        * 7. V byte payload
        *
        */
        internal static Message ParseFrom(KafkaBinaryReader reader, long offset, int size, int partitionID)
        {
            Message result;
            int readed = 0;
            uint checksum = reader.ReadUInt32();
            readed += 4;
            byte magic = reader.ReadByte();
            readed++;

            byte[] payload;
            if (magic == 2 || magic == 0) // some producers (CLI) send magic 0 while others have value of 2
            {
                byte attributes = reader.ReadByte();
                readed++;
                var keyLength = reader.ReadInt32();
                readed += 4;
                byte[] key = null;
                if (keyLength != -1)
                {
                    key = reader.ReadBytes(keyLength);
                    readed += keyLength;
                }
                var payloadSize = reader.ReadInt32();
                readed += 4;
                payload = reader.ReadBytes(payloadSize);
                readed += payloadSize;
                result = new Message(payload, key, Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask))
                {
                    Offset = offset,
                    PartitionId = partitionID
                };
            }
            else
            {
                payload = reader.ReadBytes(size - DefaultHeaderSize);
                readed += size - DefaultHeaderSize;
                result = new Message(payload) { Offset = offset, PartitionId = partitionID };
            }

            if (size != readed)
            {
                throw new KafkaException(ErrorMapping.InvalidFetchSizeCode);
            }

            return result;
        }
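As a rough cross-check of the format comment above (a standalone sketch, not the client's own size computation), the serialized size of a keyed message is 4 (CRC) + 1 (magic) + 1 (attributes) + 4 (key length) + K + 4 (payload length) + V, with a null key written as length -1 and no key bytes:

        internal static int ComputeKeyedMessageSize(byte[] key, byte[] payload)
        {
            const int CrcLength = 4;          // 4 byte CRC32 of the message
            const int MagicLength = 1;        // 1 byte "magic" identifier
            const int AttributesLength = 1;   // 1 byte "attributes"
            const int KeySizeLength = 4;      // 4 byte key length (K, or -1 for a null key)
            const int PayloadSizeLength = 4;  // 4 byte payload length (V)

            int keyLength = key == null ? 0 : key.Length;   // a null key contributes no key bytes
            return CrcLength + MagicLength + AttributesLength
                   + KeySizeLength + keyLength
                   + PayloadSizeLength + payload.Length;
        }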
Example #30
 public void SetSizeValid()
 {
     byte[] messageBytes = new byte[] { 1, 2, 3, 4, 5 };
     Message msg1 = new Message(messageBytes);
     Message msg2 = new Message(messageBytes);
     MessageSet messageSet = new BufferedMessageSet(new List<Message>() { msg1, msg2 });
     Assert.AreEqual(
         2 * (MessageLengthPartLength + MagicNumberPartLength + AttributesPartLength + ChecksumPartLength + messageBytes.Length),
         messageSet.SetSize);
 }
Example #31
        /// <summary>
        /// Gives the size of a size-delimited entry in a message set
        /// </summary>
        /// <param name="message">
        /// The message.
        /// </param>
        /// <returns>
        /// Size of message
        /// </returns>
        public static int GetEntrySize(Message message)
        {
            Guard.NotNull(message, "message");

            return message.Size + DefaultMessageLengthSize;
        }
Example #32
        /// <summary>
        /// Gives the size of a size-delimited entry in a message set
        /// </summary>
        /// <param name="message">
        /// The message.
        /// </param>
        /// <returns>
        /// Size of message
        /// </returns>
        public static int GetEntrySize(Message message)
        {
            Guard.Assert<ArgumentNullException>(() => message != null);

            return message.Size + DefaultMessageLengthSize;
        }
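A hypothetical usage sketch (the GetMessageSetSize name and IEnumerable<Message> parameter are illustrative, not part of the client): summing GetEntrySize over a collection of messages gives the total byte size of the size-delimited message set, the same arithmetic the SetSizeValid test above asserts for two messages.

        public static int GetMessageSetSize(IEnumerable<Message> messages)
        {
            Guard.NotNull(messages, "messages");

            // Each entry contributes its message size plus the 4-byte length prefix.
            return messages.Sum(message => GetEntrySize(message));
        }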