public void BufferedMessageSetWriteToValidSequence()
        {
            // Serializes a two-message set and walks the stream byte-for-byte,
            // verifying each entry's layout:
            // [offset:8][size:4][crc:4][magic:1][attributes:1][key len:4][payload len:4][payload:*]
            byte[] payloadBytes = { 1, 2, 3, 4, 5 };

            var first  = new Message(payloadBytes) { Offset = 0 };
            var second = new Message(payloadBytes) { Offset = 1 };

            MessageSet messageSet = new BufferedMessageSet(new List<Message> { first, second }, 0);

            var stream = new MemoryStream();
            messageSet.WriteTo(stream);

            var reader     = new KafkaBinaryReader(stream);
            int entryStart = 0; // absolute position of the current entry in the buffer

            for (int i = 0; i < 2; ++i)
            {
                // 8-byte logical offset of this entry (0, then 1).
                reader.ReadInt64().Should().Be(i);

                // 4-byte size of the serialized message that follows.
                var messageSize = reader.ReadInt32();
                messageSize.Should().Be(Message.DefaultHeaderSize + first.PayloadSize);

                // The CRC covers everything after the 4-byte checksum itself,
                // i.e. it starts 8 (offset) + 4 (size) + 4 (crc) bytes into the entry.
                reader.ReadUInt32().Should().Be(Crc32Hasher.ComputeCrcUint32(stream.GetBuffer(), entryStart + 8 + 4 + 4, messageSize - 4));

                reader.ReadByte().Should().Be(0);                      // magic byte
                reader.ReadByte().Should().Be(first.Attributes);       // attributes (codec bits)
                reader.ReadInt32().Should().Be(-1);                    // key length (-1 == no key)
                reader.ReadInt32().Should().Be(payloadBytes.Length);   // payload length
                reader.ReadBytes(payloadBytes.Length).SequenceEqual(payloadBytes).Should().BeTrue();

                // Advance past the 12-byte entry header plus the message body.
                entryStart += 8 + 4 + messageSize;
            }
        }
Code example #2
0
File: Message.cs  Project: tnachen/kafka
        /// <summary>
        /// Deserializes one message in the legacy wire format from <paramref name="reader"/>.
        /// Layout: [magic:1][attributes:1 (only when magic == 1)][crc:4][payload:*].
        /// </summary>
        /// <param name="reader">Positioned at the first byte (magic) of the message.</param>
        /// <param name="size">Declared total size of the message in bytes.</param>
        /// <returns>The parsed <see cref="Message"/>.</returns>
        /// <exception cref="KafkaException">
        /// Thrown with <see cref="ErrorMapping.InvalidFetchSizeCode"/> when the bytes
        /// consumed do not equal the declared size (truncated/corrupt fetch).
        /// </exception>
        internal static Message ParseFrom(KafkaBinaryReader reader, int size)
        {
            byte magic = reader.ReadByte();
            int bytesConsumed = 1;

            Message result;
            if (magic == 1)
            {
                // Magic 1 carries an attributes byte whose low bits select the compression codec.
                byte attributes = reader.ReadByte();
                bytesConsumed++;

                byte[] checksum = reader.ReadBytes(4);
                bytesConsumed += 4;

                int payloadLength = size - (DefaultHeaderSize + 1); // extra 1 for the attributes byte
                byte[] payload = reader.ReadBytes(payloadLength);
                bytesConsumed += payloadLength;

                result = new Message(payload, checksum, Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask));
            }
            else
            {
                byte[] checksum = reader.ReadBytes(4);
                bytesConsumed += 4;

                int payloadLength = size - DefaultHeaderSize;
                byte[] payload = reader.ReadBytes(payloadLength);
                bytesConsumed += payloadLength;

                result = new Message(payload, checksum);
            }

            // Every byte of the declared message size must have been consumed.
            if (size != bytesConsumed)
            {
                throw new KafkaException(ErrorMapping.InvalidFetchSizeCode);
            }

            return result;
        }
Code example #3
0
        /// <summary>
        /// Serializes a simple uncompressed message and verifies the wire layout:
        /// [crc:4][magic:1][attributes:1][key length:4][payload length:4][payload:*].
        /// </summary>
        public void GetBytesValidSequence()
        {
            var payload = Encoding.UTF8.GetBytes("kafka");
            Message message = new Message(payload, CompressionCodecs.NoCompressionCodec);

            MemoryStream ms = new MemoryStream();
            message.WriteTo(ms);

            // Serialized length must match the message's declared size.
            Assert.AreEqual(message.Size, ms.Length);

            // The CRC covers everything after the leading 4-byte checksum.
            var crc = Crc32Hasher.ComputeCrcUint32(ms.GetBuffer(), 4, (int)(ms.Length - 4));

            // first 4 bytes = the crc
            using (var reader = new KafkaBinaryReader(ms))
            {
                Assert.AreEqual(crc, reader.ReadUInt32());

                // magic
                Assert.AreEqual(message.Magic, reader.ReadByte());

                // attributes (no compression => 0)
                Assert.AreEqual((byte)0, reader.ReadByte());

                // key size (-1 == no key)
                Assert.AreEqual(-1, reader.ReadInt32());

                // payload size
                Assert.AreEqual(payload.Length, reader.ReadInt32());

                // Read exactly payload.Length bytes. The previous hard-coded 10 only
                // passed because BinaryReader.ReadBytes truncates at end-of-stream.
                payload.SequenceEqual(reader.ReadBytes(payload.Length)).Should().BeTrue();
            }
        }
Code example #4
0
        /// <summary>
        /// Serializes a simple uncompressed message and verifies the wire layout:
        /// [crc:4][magic:1][attributes:1][key length:4][payload length:4][payload:*].
        /// </summary>
        public void GetBytesValidSequence()
        {
            var     payload = Encoding.UTF8.GetBytes("kafka");
            Message message = new Message(payload, CompressionCodecs.NoCompressionCodec);

            MemoryStream ms = new MemoryStream();

            message.WriteTo(ms);

            // Serialized length must match the message's declared size.
            Assert.AreEqual(message.Size, ms.Length);

            // The CRC covers everything after the leading 4-byte checksum.
            var crc = Crc32Hasher.ComputeCrcUint32(ms.GetBuffer(), 4, (int)(ms.Length - 4));

            // first 4 bytes = the crc
            using (var reader = new KafkaBinaryReader(ms))
            {
                Assert.AreEqual(crc, reader.ReadUInt32());

                // magic
                Assert.AreEqual(message.Magic, reader.ReadByte());

                // attributes (no compression => 0)
                Assert.AreEqual((byte)0, reader.ReadByte());

                // key size (-1 == no key)
                Assert.AreEqual(-1, reader.ReadInt32());

                // payload size
                Assert.AreEqual(payload.Length, reader.ReadInt32());

                // Read exactly payload.Length bytes. The previous hard-coded 10 only
                // passed because BinaryReader.ReadBytes truncates at end-of-stream.
                payload.SequenceEqual(reader.ReadBytes(payload.Length)).Should().BeTrue();
            }
        }
Code example #5
0
        /**
         * A message. The format of an N byte message is the following:
         *
         * 1. 4 byte CRC32 of the message
         * 2. 1 byte "magic" identifier to allow format changes, value is 2 currently
         * 3. 1 byte "attributes" identifier to allow annotations on the message independent of the version (e.g. compression enabled, type of codec used)
         * 4. 4 byte key length, containing length K
         * 5. K byte key
         * 6. 4 byte payload length, containing length V
         * 7. V byte payload
         *
         * Throws KafkaException(InvalidFetchSizeCode) when the bytes consumed
         * do not match the declared size (truncated or corrupt fetch).
         */
        internal static Message ParseFrom(KafkaBinaryReader reader, long offset, int size, int partitionID)
        {
            var consumed = 0;

            // Leading 4-byte CRC (read to advance the stream; value is not validated here)
            // followed by the 1-byte magic identifier.
            var checksum = reader.ReadUInt32();
            consumed += 4;
            var magic = reader.ReadByte();
            consumed++;

            Message result;
            if (magic == 2 || magic == 0) // some producers (CLI) send magic 0 while others have value of 2
            {
                var attributes = reader.ReadByte();
                consumed++;

                // Optional key: a length of -1 means no key is present.
                var keyLength = reader.ReadInt32();
                consumed += 4;
                byte[] key = null;
                if (keyLength != -1)
                {
                    key       = reader.ReadBytes(keyLength);
                    consumed += keyLength;
                }

                var payloadSize = reader.ReadInt32();
                consumed += 4;
                var payload = reader.ReadBytes(payloadSize);
                consumed += payloadSize;

                result = new Message(payload, key,
                                     Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask))
                {
                    Offset      = offset,
                    PartitionId = partitionID
                };
            }
            else
            {
                // Unrecognized magic: treat everything after the default header as raw payload.
                var payload = reader.ReadBytes(size - DefaultHeaderSize);
                consumed += size - DefaultHeaderSize;
                result = new Message(payload)
                {
                    Offset      = offset,
                    PartitionId = partitionID
                };
            }

            // The declared size must be fully consumed.
            if (size != consumed)
            {
                throw new KafkaException(ErrorMapping.InvalidFetchSizeCode);
            }

            return result;
        }