/// <summary>
/// Decode messages from a payload and assign them a given kafka offset.
/// </summary>
/// <param name="reader">The reader</param>
/// <param name="messageSize">The size of the message, for Crc Hash calculation</param>
/// <param name="offset">The offset representing the log entry from kafka of this message.</param>
/// <param name="partitionId">The partition being read</param>
/// <returns>Enumerable representing stream of messages decoded from byte[].</returns>
/// <remarks>The return type is an Enumerable as the message could be a compressed message set.</remarks>
public static IImmutableList<Message> ReadMessage(this IKafkaReader reader, int messageSize, long offset, int partitionId = 0)
{
    // Validate message integrity: the first 4 bytes are the CRC of the remaining bytes.
    var crc = BitConverter.ToUInt32(reader.RawRead(4), 0);
    var crcHash = BitConverter.ToUInt32(reader.CrcHash(messageSize - 4), 0);
    if (crc != crcHash)
    {
        throw new CrcValidationException("Buffer did not match CRC validation.") { Crc = crc, CalculatedCrc = crcHash };
    }

    var messageVersion = reader.ReadByte();
    var attribute = reader.ReadByte();

    // Message format v1 and above carries a timestamp; a negative value means none was set.
    DateTime? timestamp = null;
    if (messageVersion >= 1)
    {
        var milliseconds = reader.ReadInt64();
        if (milliseconds >= 0)
        {
            timestamp = milliseconds.FromUnixEpochMilliseconds();
        }
    }

    var key = reader.ReadBytes();

    // The low bits of the attribute byte identify the compression codec.
    var codec = (MessageCodec)(Message.AttributeMask & attribute);
    switch (codec)
    {
        case MessageCodec.CodecNone:
        {
            var value = reader.ReadBytes();
            return ImmutableList<Message>.Empty.Add(new Message(value, attribute, offset, partitionId, messageVersion, key, timestamp));
        }

        case MessageCodec.CodecGzip:
        {
            // The value of a gzip message is itself a compressed message set: decompress it and decode the inner messages.
            var messageLength = reader.ReadInt32();
            var messageStream = new LimitedReadableStream(reader.BaseStream, messageLength);
            using (var gzipReader = new BigEndianBinaryReader(messageStream.Unzip()))
            {
                return gzipReader.ReadMessages(partitionId);
            }
        }

        default:
            throw new NotSupportedException($"Codec type of {codec} is not supported.");
    }
}
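
// Usage sketch: decoding one message read off the wire. It assumes BigEndianBinaryReader
// (the reader type used above for the gzip path) can serve as the IKafkaReader, and that
// payloadStream, messageSize, offset, and partitionId are placeholders supplied by the caller
// from the surrounding fetch response.
//
//     using (var reader = new BigEndianBinaryReader(payloadStream))
//     {
//         var messages = reader.ReadMessage(messageSize, offset, partitionId);
//         // An uncompressed message yields a single entry; a gzip message set yields one entry per inner message.
//     }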