Example 1
        /// <summary>
        /// Decode messages from a payload and assign them a given kafka offset.
        /// </summary>
        /// <param name="reader">The reader</param>
        /// <param name="messageSize">The size of the message, for Crc Hash calculation</param>
        /// <param name="offset">The offset represting the log entry from kafka of this message.</param>
        /// <param name="partitionId">The partition being read</param>
        /// <returns>Enumerable representing stream of messages decoded from byte[].</returns>
        /// <remarks>The return type is an Enumerable as the message could be a compressed message set.</remarks>
        public static IImmutableList<Message> ReadMessage(this IKafkaReader reader, int messageSize, long offset, int partitionId = 0)
        {
            var crc     = BitConverter.ToUInt32(reader.RawRead(4), 0);
            var crcHash = BitConverter.ToUInt32(reader.CrcHash(messageSize - 4), 0);

            if (crc != crcHash)
            {
                throw new CrcValidationException("Buffer did not match CRC validation.")
                      {
                          Crc = crc, CalculatedCrc = crcHash
                      };
            }

            var       messageVersion = reader.ReadByte();
            var       attribute      = reader.ReadByte();
            DateTime? timestamp      = null;

            if (messageVersion >= 1)
            {
                var milliseconds = reader.ReadInt64();
                if (milliseconds >= 0)
                {
                    timestamp = milliseconds.FromUnixEpochMilliseconds();
                }
            }
            var key = reader.ReadBytes();

            var codec = (MessageCodec)(Message.AttributeMask & attribute);

            switch (codec)
            {
            case MessageCodec.CodecNone: {
                var value = reader.ReadBytes();
                return ImmutableList<Message>.Empty.Add(new Message(value, attribute, offset, partitionId, messageVersion, key, timestamp));
            }

            case MessageCodec.CodecGzip: {
                var messageLength = reader.ReadInt32();
                var messageStream = new LimitedReadableStream(reader.BaseStream, messageLength);
                using (var gzipReader = new BigEndianBinaryReader(messageStream.Unzip())) {
                    return gzipReader.ReadMessages(partitionId);
                }
            }

            default:
                throw new NotSupportedException($"Codec type of {codec} is not supported.");
            }
        }
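For orientation, a minimal sketch (not part of the library) of the message-set loop that typically drives this extension: in the Kafka wire format each message-set entry is prefixed with its log offset (int64) and the byte size of the message that follows (int32), which is where the offset and messageSize arguments above come from. ReadMessageSetSketch and the HasData end-of-stream check are illustrative names, not members of the actual IKafkaReader API; the library's own ReadMessages extension presumably plays this role.

        public static IImmutableList<Message> ReadMessageSetSketch(this IKafkaReader reader, int partitionId = 0)
        {
            var messages = ImmutableList<Message>.Empty;
            while (reader.HasData)                    // assumed end-of-stream check; the real reader may expose this differently
            {
                var entryOffset = reader.ReadInt64(); // log offset of this message-set entry
                var messageSize = reader.ReadInt32(); // size of the message that follows, CRC included
                messages = messages.AddRange(reader.ReadMessage(messageSize, entryOffset, partitionId));
            }
            return messages;
        }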
Example 2
        /// <inheritdoc />
        public override IMemberMetadata DecodeMetadata(IKafkaReader reader)
        {
            var version    = reader.ReadInt16();
            var topicNames = new string[reader.ReadInt32()];

            for (var t = 0; t < topicNames.Length; t++)
            {
                topicNames[t] = reader.ReadString();
            }
            var userData = reader.ReadBytes();

            return new ConsumerProtocolMetadata(version, topicNames, userData);
        }
Example 3
        /// <inheritdoc />
        protected override ConsumerProtocolMetadata DecodeMetadata(string assignmentStrategy, IKafkaReader reader, int expectedLength)
        {
            var version    = reader.ReadInt16();
            var topicNames = new string[reader.ReadInt32()];

            for (var t = 0; t < topicNames.Length; t++)
            {
                topicNames[t] = reader.ReadString();
            }
            var userData = reader.ReadBytes();

            return new ConsumerProtocolMetadata(topicNames, assignmentStrategy, userData, version);
        }
Example 4
        /// <summary>
        /// Decode messages from a payload and assign them a given kafka offset.
        /// </summary>
        /// <param name="reader">The reader</param>
        /// <param name="messageSize">The size of the message, for Crc Hash calculation</param>
        /// <param name="offset">The offset represting the log entry from kafka of this message.</param>
        /// <returns>Enumerable representing stream of messages decoded from byte[].</returns>
        /// <remarks>The return type is an Enumerable as the message could be a compressed message set.</remarks>
        public static IImmutableList<Message> ReadMessage(this IKafkaReader reader, int messageSize, long offset)
        {
            var crc     = reader.ReadUInt32();
            var crcHash = reader.ReadCrc(messageSize - 4);

            if (crc != crcHash)
            {
                throw new CrcValidationException(crc, crcHash);
            }

            var             messageVersion = reader.ReadByte();
            var             attribute      = reader.ReadByte();
            DateTimeOffset? timestamp      = null;

            if (messageVersion >= 1)
            {
                var milliseconds = reader.ReadInt64();
                if (milliseconds >= 0)
                {
                    timestamp = DateTimeOffset.FromUnixTimeMilliseconds(milliseconds);
                }
            }
            var key   = reader.ReadBytes();
            var value = reader.ReadBytes();

            var codec = (MessageCodec)(Message.CodecMask & attribute);

            if (codec == MessageCodec.None)
            {
                return ImmutableList<Message>.Empty.Add(new Message(value, key, attribute, offset, messageVersion, timestamp));
            }
            var uncompressedBytes = value.ToUncompressed(codec);

            using (var messageSetReader = new KafkaReader(uncompressedBytes)) {
                return messageSetReader.ReadMessages(codec);
            }
        }
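A minimal usage sketch, not taken from the library itself: wrap a raw message payload in a KafkaReader, as the compressed branch above already does, and pass ReadMessage the size and offset supplied by the enclosing message-set entry. The placeholder names and the assumption that KafkaReader accepts a plain byte array (rather than, say, an ArraySegment<byte>) are mine, not the library's.

        public static void ReadSingleMessageSketch(byte[] rawMessage, long offsetFromMessageSet)
        {
            // rawMessage is assumed to start with the 4-byte CRC, so its full length
            // is the messageSize that the CRC check above is computed against.
            using (var reader = new KafkaReader(rawMessage)) {
                var messages = reader.ReadMessage(rawMessage.Length, offsetFromMessageSet);
                Console.WriteLine($"Decoded {messages.Count} message(s) from a {rawMessage.Length} byte payload.");
            }
        }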
Example 5
        /// <inheritdoc />
        protected override ConsumerMemberAssignment DecodeAssignment(IKafkaReader reader, int expectedLength)
        {
            var version = reader.ReadInt16();

            var topics     = new List<TopicPartition>();
            var topicCount = reader.ReadInt32();

            for (var t = 0; t < topicCount; t++)
            {
                var topicName = reader.ReadString();

                var partitionCount = reader.ReadInt32();
                for (var p = 0; p < partitionCount; p++)
                {
                    var partitionId = reader.ReadInt32();
                    topics.Add(new TopicPartition(topicName, partitionId));
                }
            }
            var userData = reader.ReadBytes();

            return new ConsumerMemberAssignment(topics, userData, version);
        }
        protected override ByteTypeAssignment DecodeAssignment(IKafkaReader reader, int expectedLength)
        {
            return new ByteTypeAssignment(reader.ReadBytes());
        }

        protected override ByteTypeMetadata DecodeMetadata(string assignmentStrategy, IKafkaReader reader, int expectedLength)
        {
            return new ByteTypeMetadata(assignmentStrategy, reader.ReadBytes());
        }
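To make the nested loops of the assignment decoder in Example 5 concrete, here is a hand-rolled sketch (illustration only, not library code) of the byte layout it walks, assuming the reader follows the standard Kafka wire encoding: big-endian int16/int32, strings prefixed by an int16 length, and byte arrays prefixed by an int32 length where -1 means null. The topic name and partition ids are placeholders; the same primitives underlie the metadata decoders in Examples 2 and 3.

        private static byte[] BuildAssignmentBytesSketch()
        {
            var buffer = new List<byte>();
            void WriteInt16(short v) { buffer.Add((byte)(v >> 8)); buffer.Add((byte)v); }
            void WriteInt32(int v)
            {
                buffer.Add((byte)(v >> 24));
                buffer.Add((byte)(v >> 16));
                buffer.Add((byte)(v >> 8));
                buffer.Add((byte)v);
            }
            void WriteString(string s)
            {
                var bytes = System.Text.Encoding.UTF8.GetBytes(s);
                WriteInt16((short)bytes.Length); // int16 length prefix, then UTF-8 bytes
                buffer.AddRange(bytes);
            }

            WriteInt16(0);            // version
            WriteInt32(1);            // topic count
            WriteString("my-topic");  // topic name
            WriteInt32(2);            // partition count for "my-topic"
            WriteInt32(0);            // partition id 0
            WriteInt32(1);            // partition id 1
            WriteInt32(-1);           // userData: length -1 encodes a null byte array
            return buffer.ToArray();
        }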