Example 1
        /// <summary>
        /// Creates a string representation of the message.
        /// </summary>
        /// <param name="reader">
        /// The reader.
        /// </param>
        /// <param name="count">
        /// The total message size in bytes (header plus payload).
        /// </param>
        /// <returns>
        /// A string representation of the message.
        /// </returns>
        public static string ParseFrom(KafkaBinaryReader reader, int count)
        {
            Guard.Assert<ArgumentNullException>(() => reader != null);
            var sb = new StringBuilder();
            int payloadSize = count - DefaultHeaderSize;
            sb.Append("Magic: ");
            sb.Append(reader.ReadByte());
            sb.Append(", Checksum: ");
            for (int i = 0; i < 4; i++)
            {
                sb.Append("[");
                sb.Append(reader.ReadByte());
                sb.Append("]");
            }

            sb.Append(", topic: ");
            var encodedPayload = reader.ReadBytes(payloadSize);
            try
            {
                sb.Append(Encoding.UTF8.GetString(encodedPayload));
            }
            catch (Exception)
            {
                sb.Append("n/a");
            }

            return sb.ToString();
        }
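A minimal sketch of driving this debug formatter, assuming the method lives on the library's Message class, that KafkaBinaryReader wraps a Stream, and that DefaultHeaderSize is 5 (one magic byte plus a four-byte checksum). The namespaces and the header size are inferred from the reads above, not confirmed by the listing.

using System;
using System.IO;
using System.Text;
using Kafka.Client.Messages;        // assumed namespace for Message
using Kafka.Client.Serialization;   // assumed namespace for KafkaBinaryReader

public static class ParseFromDemo
{
    public static void Main()
    {
        byte[] payload = Encoding.UTF8.GetBytes("hello");
        using (var stream = new MemoryStream())
        {
            stream.WriteByte(1);                            // magic
            stream.Write(new byte[] { 1, 2, 3, 4 }, 0, 4);  // checksum bytes
            stream.Write(payload, 0, payload.Length);       // payload
            stream.Position = 0;

            using (var reader = new KafkaBinaryReader(stream))
            {
                // count is the full message size; with an assumed 5-byte header
                // the formatter reads the remaining bytes as the payload.
                int count = (int)stream.Length;
                Console.WriteLine(Message.ParseFrom(reader, count));
                // expected output: "Magic: 1, Checksum: [1][2][3][4], topic: hello"
            }
        }
    }
}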
Example 2
        /**
        * A message. The format of an N byte message is the following:
        *
        * 1. 4 byte CRC32 of the message
        * 2. 1 byte "magic" identifier to allow format changes, value is 2 currently
        * 3. 1 byte "attributes" identifier to allow annotations on the message independent of the version (e.g. compression enabled, type of codec used)
        * 4. 4 byte key length, containing length K
        * 5. K byte key
        * 6. 4 byte payload length, containing length V
        * 7. V byte payload
        *
        */
        internal static Message ParseFrom(KafkaBinaryReader reader, long offset, int size, int partitionID)
        {
            Message result;
            int readed = 0;
            uint checksum = reader.ReadUInt32();
            readed += 4;
            byte magic = reader.ReadByte();
            readed++;

            byte[] payload;
            if (magic == 2 || magic == 0) // some producers (CLI) send magic 0 while others send 2
            {
                byte attributes = reader.ReadByte();
                readed++;
                var keyLength = reader.ReadInt32();
                readed += 4;
                byte[] key = null;
                if (keyLength != -1)
                {
                    key = reader.ReadBytes(keyLength);
                    readed += keyLength;
                }
                var payloadSize = reader.ReadInt32();
                readed += 4;
                payload = reader.ReadBytes(payloadSize);
                readed += payloadSize;
                result = new Message(payload, key, Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask))
                {
                    Offset = offset,
                    PartitionId = partitionID
                };
            }
            else
            {
                payload = reader.ReadBytes(size - DefaultHeaderSize);
                readed += size - DefaultHeaderSize;
                result = new Message(payload) { Offset = offset, PartitionId = partitionID };
            }

            if (size != readed)
            {
                throw new KafkaException(ErrorMapping.InvalidFetchSizeCode);
            }

            return result;
        }
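The numbered layout in the header comment maps one-to-one onto the reads above. The following is a minimal sketch that builds such a buffer by hand so the byte accounting (4 CRC + 1 magic + 1 attributes + 4 key length + K key bytes + 4 payload length + V payload bytes) is explicit. The big-endian int encoding is an assumption based on the Kafka wire protocol, and WriteInt32BigEndian is a purely illustrative helper, not part of the client library.

using System;
using System.IO;
using System.Text;

public static class WireFormatSketch
{
    // Illustrative helper: writes a 32-bit value in network (big-endian) byte order.
    private static void WriteInt32BigEndian(Stream s, int value)
    {
        s.WriteByte((byte)(value >> 24));
        s.WriteByte((byte)(value >> 16));
        s.WriteByte((byte)(value >> 8));
        s.WriteByte((byte)value);
    }

    public static byte[] Build(byte[] key, byte[] payload)
    {
        using (var ms = new MemoryStream())
        {
            WriteInt32BigEndian(ms, 0);                      // 1. CRC32 (placeholder here)
            ms.WriteByte(2);                                 // 2. magic
            ms.WriteByte(0);                                 // 3. attributes (no compression)
            WriteInt32BigEndian(ms, key?.Length ?? -1);      // 4. key length K (-1 means null key)
            if (key != null)
            {
                ms.Write(key, 0, key.Length);                // 5. K key bytes
            }
            WriteInt32BigEndian(ms, payload.Length);         // 6. payload length V
            ms.Write(payload, 0, payload.Length);            // 7. V payload bytes
            return ms.ToArray();                             // total size = 14 + K + V
        }
    }

    public static void Main()
    {
        byte[] message = Build(Encoding.UTF8.GetBytes("key"), Encoding.UTF8.GetBytes("value"));
        Console.WriteLine(message.Length); // 14 + 3 + 5 = 22: the `size` the parser checks against
    }
}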
Example 3
        internal static Message ParseFrom(KafkaBinaryReader reader, int size)
        {
            Message result;
            int readed = 0;
            byte magic = reader.ReadByte();
            readed++;
            byte[] checksum;
            byte[] payload;
            if (magic == 1)
            {
                byte attributes = reader.ReadByte();
                readed++;
                checksum = reader.ReadBytes(4);
                readed += 4;
                payload = reader.ReadBytes(size - (DefaultHeaderSize + 1));
                readed += size - (DefaultHeaderSize + 1);
                result = new Message(payload, checksum, Messages.CompressionCodec.GetCompressionCodec(attributes & CompressionCodeMask));
            }
            else
            {
                checksum = reader.ReadBytes(4);
                readed += 4;
                payload = reader.ReadBytes(size - DefaultHeaderSize);
                readed += size - DefaultHeaderSize;
                result = new Message(payload, checksum);
            }

            if (size != readed)
            {
                throw new KafkaException(KafkaException.InvalidFetchSizeCode);
            }

            return result;
        }
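For this older layout the size check only balances when DefaultHeaderSize is 5 (one magic byte plus a four-byte checksum). Below is a minimal sketch of the per-branch byte accounting; the header-size value is inferred from the check, not stated in the listing.

public static class OldHeaderSizes
{
    // magic 1: magic(1) + attributes(1) + checksum(4) = 6 header bytes
    // magic 0: magic(1) + checksum(4)                 = 5 header bytes
    public static int HeaderBytes(byte magic)
    {
        return magic == 1 ? 6 : 5;
    }

    public static void Main()
    {
        const int payloadBytes = 5;
        System.Console.WriteLine(HeaderBytes(1) + payloadBytes); // 11: the `size` the magic-1 branch expects
        System.Console.WriteLine(HeaderBytes(0) + payloadBytes); // 10: the `size` the magic-0 branch expects
    }
}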