Example 1
        internal static Message ParseFrom(KafkaBinaryReader reader, int size)
        {
            Message result;
            int     readed = 0;
            byte    magic  = reader.ReadByte();

            readed++;
            byte[] checksum;
            byte[] payload;
            if (magic == 1) // magic 1 carries an attributes byte (compression codec); the older format does not
            {
                byte attributes = reader.ReadByte();
                readed++;
                checksum = reader.ReadBytes(4);
                readed  += 4;
                payload  = reader.ReadBytes(size - (DefaultHeaderSize + 1));
                readed  += size - (DefaultHeaderSize + 1);
                result   = new Message(payload, checksum, Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask));
            }
            else
            {
                checksum = reader.ReadBytes(4);
                readed  += 4;
                payload  = reader.ReadBytes(size - DefaultHeaderSize);
                readed  += size - DefaultHeaderSize;
                result   = new Message(payload, checksum);
            }

            if (size != readed)
            {
                throw new KafkaException(ErrorMapping.InvalidFetchSizeCode);
            }

            return(result);
        }
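In the magic == 1 branch above the parser expects magic (1 byte), attributes (1 byte), checksum (4 bytes) and then the payload; the size check at the end only balances if DefaultHeaderSize is 5 (magic + checksum) in this version of the class. Below is a minimal round-trip sketch, assuming internal access to Message.ParseFrom (for example from a test assembly); the frame is built by hand, and the checksum bytes are simply stored by ParseFrom rather than verified:

    // Hypothetical frame for the magic == 1 branch: magic, attributes, 4 checksum bytes, payload
    var payload  = Encoding.UTF8.GetBytes("hello");
    var checksum = BitConverter.GetBytes(Crc32Hasher.ComputeCrcUint32(payload, 0, payload.Length));

    using (var ms = new MemoryStream())
    {
        ms.WriteByte(1);                      // magic = 1
        ms.WriteByte(0);                      // attributes = 0 (no compression)
        ms.Write(checksum, 0, 4);             // read back verbatim via ReadBytes(4)
        ms.Write(payload, 0, payload.Length); // payload
        ms.Position = 0;

        using (var reader = new KafkaBinaryReader(ms))
        {
            // size covers the whole message: 1 (magic) + 1 (attributes) + 4 (checksum) + payload
            Message parsed = Message.ParseFrom(reader, 6 + payload.Length);
        }
    }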
Example 2
        /**
         * A message. The format of an N byte message is the following:
         *
         * 1. 4 byte CRC32 of the message
         * 2. 1 byte "magic" identifier to allow format changes, value is 2 currently
         * 3. 1 byte "attributes" identifier to allow annotations on the message independent of the version (e.g. compression enabled, type of codec used)
         * 4. 4 byte key length, containing length K
         * 5. K byte key
         * 6. 4 byte payload length, containing length V
         * 7. V byte payload
         *
         */
        internal static Message ParseFrom(KafkaBinaryReader reader, long offset, int size, int partitionID)
        {
            Message result;
            var     readed   = 0;
            var     checksum = reader.ReadUInt32();

            readed += 4;
            var magic = reader.ReadByte();

            readed++;

            byte[] payload;
            if (magic == 2 || magic == 0) // some producers (CLI) send magic 0 while others have value of 2
            {
                var attributes = reader.ReadByte();
                readed++;
                var keyLength = reader.ReadInt32();
                readed += 4;
                byte[] key = null;
                if (keyLength != -1)
                {
                    key     = reader.ReadBytes(keyLength);
                    readed += keyLength;
                }
                var payloadSize = reader.ReadInt32();
                readed += 4;
                payload = reader.ReadBytes(payloadSize);
                readed += payloadSize;
                result  = new Message(payload, key,
                                      Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask))
                {
                    Offset      = offset,
                    PartitionId = partitionID
                };
            }
            else
            {
                payload = reader.ReadBytes(size - DefaultHeaderSize);
                readed += size - DefaultHeaderSize;
                result  = new Message(payload)
                {
                    Offset = offset, PartitionId = partitionID
                };
            }

            if (size != readed)
            {
                throw new KafkaException(ErrorMapping.InvalidFetchSizeCode);
            }

            return(result);
        }
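A hedged round-trip sketch for this overload: Message.WriteTo (exercised by the tests below) emits the crc / magic / attributes / key length / payload length / payload layout documented above, and ParseFrom reads it back; the offset and partition values here are arbitrary illustrations:

    var original = new Message(Encoding.UTF8.GetBytes("kafka"), CompressionCodecs.NoCompressionCodec);

    using (var ms = new MemoryStream())
    {
        original.WriteTo(ms);   // crc + magic + attributes + key length + payload length + payload
        ms.Position = 0;        // rewind before reading back

        using (var reader = new KafkaBinaryReader(ms))
        {
            // size is the full serialized message, which ParseFrom checks against the bytes it consumed
            Message parsed = Message.ParseFrom(reader, offset: 0, size: (int)ms.Length, partitionID: 0);
        }
    }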
        public void BufferedMessageSetWriteToValidSequence()
        {
            byte[] messageBytes = { 1, 2, 3, 4, 5 };
            var    msg1         = new Message(messageBytes)
            {
                Offset = 0
            };
            var msg2 = new Message(messageBytes);

            msg2.Offset = 1;
            MessageSet messageSet = new BufferedMessageSet(new List <Message>()
            {
                msg1, msg2
            }, 0);
            var ms = new MemoryStream();

            messageSet.WriteTo(ms);

            var reader     = new KafkaBinaryReader(ms);
            int baseOffset = 0;

            for (int i = 0; i < 2; ++i)
            {
                reader.ReadInt64().Should().Be(i);  // offset
                var msgLength = reader.ReadInt32(); // length
                msgLength.Should().Be(Message.DefaultHeaderSize + msg1.PayloadSize);
                reader.ReadUInt32().Should().Be(Crc32Hasher.ComputeCrcUint32(ms.GetBuffer(), baseOffset + 8 + 4 + 4, msgLength - 4));
                reader.ReadByte().Should().Be(0);                    // magic
                reader.ReadByte().Should().Be(msg1.Attributes);
                reader.ReadInt32().Should().Be(-1);                  // key length
                reader.ReadInt32().Should().Be(messageBytes.Length); // payload length
                reader.ReadBytes(messageBytes.Length).SequenceEqual(messageBytes).Should().BeTrue();
                baseOffset += 8 + 4 + msgLength;
            }
        }
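As a sanity check on the msgLength assertion above: with the 5-byte payload and no key, one entry body is crc (4) + magic (1) + attributes (1) + key length (4) + payload length (4) + payload (5) = 19 bytes, which implies Message.DefaultHeaderSize is 14 for this on-wire version, unlike the older format parsed in Example 1. As a hypothetical helper (not part of the test):

    // Expected serialized message length for the test payload above
    int expectedMsgLength = 4 /* crc */ + 1 /* magic */ + 1 /* attributes */
                          + 4 /* key length (-1, no key bytes) */ + 4 /* payload length */
                          + messageBytes.Length;                 // = 19 for the 5-byte payload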
        public void GetBytesValidSequence()
        {
            var     payload = Encoding.UTF8.GetBytes("kafka");
            Message message = new Message(payload, CompressionCodecs.NoCompressionCodec);

            MemoryStream ms = new MemoryStream();

            message.WriteTo(ms);

            Assert.AreEqual(message.Size, ms.Length);

            var crc = Crc32Hasher.ComputeCrcUint32(ms.GetBuffer(), 4, (int)(ms.Length - 4));

            // first 4 bytes = the crc
            using (var reader = new KafkaBinaryReader(ms))
            {
                Assert.AreEqual(crc, reader.ReadUInt32());

                // magic
                Assert.AreEqual(message.Magic, reader.ReadByte());

                // attributes
                Assert.AreEqual((byte)0, reader.ReadByte());

                // key size
                Assert.AreEqual(-1, reader.ReadInt32());

                // payload size
                Assert.AreEqual(payload.Length, reader.ReadInt32());

                // remaining bytes = the payload
                payload.SequenceEqual(reader.ReadBytes(payload.Length)).Should().BeTrue();
            }
        }
        public static string ReadShortString(KafkaBinaryReader reader, string encoding)
        {
            var size = reader.ReadInt16();
            if (size < 0)
            {
                return null;
            }
            var bytes = reader.ReadBytes(size);
            Encoding encoder = Encoding.GetEncoding(encoding);
            return encoder.GetString(bytes);
        }
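ReadShortString expects a signed 2-byte length prefix (a negative value means null) followed by that many bytes in the given encoding. Below is a hypothetical counterpart for producing that layout; WriteShortString is an illustrative name, not part of the snippets above, and it assumes KafkaBinaryReader.ReadInt16 decodes network (big-endian) byte order, as the Kafka wire format does:

    public static void WriteShortString(Stream stream, string value, string encoding)
    {
        if (value == null)
        {
            stream.WriteByte(0xFF); // 0xFFFF == -1 as a big-endian short: signals null to ReadShortString
            stream.WriteByte(0xFF);
            return;
        }

        byte[] bytes = Encoding.GetEncoding(encoding).GetBytes(value);
        stream.WriteByte((byte)((bytes.Length >> 8) & 0xFF)); // 2-byte length, big-endian
        stream.WriteByte((byte)(bytes.Length & 0xFF));
        stream.Write(bytes, 0, bytes.Length);
    }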
        public static BufferedMessageSet ParseFrom(KafkaBinaryReader reader, int size, int partitionID)
        {
            int bytesLeft = size;

            if (bytesLeft == 0)
            {
                return(new BufferedMessageSet(Enumerable.Empty <Message>(), partitionID));
            }

            var messages = new List <Message>();

            do
            {
                // Fewer than 12 bytes left: not enough for another offset (8) + size (4) entry header
                if (bytesLeft < 12)
                {
                    break;
                }

                long offset  = reader.ReadInt64();
                int  msgSize = reader.ReadInt32();
                bytesLeft -= 12;

                if (msgSize > bytesLeft || msgSize < 0)
                {
                    break;
                }

                Message msg = Message.ParseFrom(reader, offset, msgSize, partitionID);
                bytesLeft -= msgSize;
                messages.Add(msg);
            } while (bytesLeft > 0);

            if (bytesLeft > 0)
            {
                // Drain any trailing bytes belonging to a truncated message so the stream ends at a known position
                reader.ReadBytes(bytesLeft);
            }

            return(new BufferedMessageSet(messages, partitionID));
        }
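An end-to-end sketch tying the pieces together: BufferedMessageSet.WriteTo (used in the test above) emits an 8-byte offset and a 4-byte size before each serialized message, which is exactly the 12-byte entry header this ParseFrom consumes on every iteration. The values here are illustrative only:

    var set = new BufferedMessageSet(new List<Message>
    {
        new Message(Encoding.UTF8.GetBytes("first")),
        new Message(Encoding.UTF8.GetBytes("second"))
    }, 0);

    using (var ms = new MemoryStream())
    {
        set.WriteTo(ms);        // [offset:8][size:4][message] for each entry
        ms.Position = 0;

        using (var reader = new KafkaBinaryReader(ms))
        {
            var parsed = BufferedMessageSet.ParseFrom(reader, (int)ms.Length, partitionID: 0);
        }
    }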