/// <summary>
/// Decode a message from the payload and assign it a given kafka offset.
/// </summary>
/// <param name="reader">The reader positioned at the start of the message body (after the offset/size header).</param>
/// <param name="messageSize">The size of the message, for Crc Hash calculation.</param>
/// <param name="offset">The offset representing the log entry from kafka of this message.</param>
/// <param name="partitionId">The partition being read.</param>
/// <returns>Enumerable representing stream of messages decoded from byte[].</returns>
/// <remarks>The return type is an Enumerable as the message could be a compressed message set.</remarks>
/// <exception cref="CrcValidationException">Thrown when the transmitted CRC does not match the CRC computed over the payload.</exception>
/// <exception cref="NotSupportedException">Thrown when the message uses a compression codec other than none/gzip.</exception>
public static IImmutableList<Message> ReadMessage(this IKafkaReader reader, int messageSize, long offset, int partitionId = 0)
{
    var crc = BitConverter.ToUInt32(reader.RawRead(4), 0);
    // The CRC covers everything after the 4-byte crc field itself.
    var crcHash = BitConverter.ToUInt32(reader.CrcHash(messageSize - 4), 0);
    if (crc != crcHash) {
        // BUGFIX: the original was missing the statement terminator after the object
        // initializer and left a stray empty statement (";") after the if block.
        throw new CrcValidationException("Buffer did not match CRC validation.") {
            Crc = crc,
            CalculatedCrc = crcHash
        };
    }

    var messageVersion = reader.ReadByte();
    var attribute = reader.ReadByte();
    DateTime? timestamp = null;
    if (messageVersion >= 1) {
        // v1+ messages carry a unix-epoch-milliseconds timestamp; a negative
        // value means "no timestamp".
        var milliseconds = reader.ReadInt64();
        if (milliseconds >= 0) {
            timestamp = milliseconds.FromUnixEpochMilliseconds();
        }
    }
    var key = reader.ReadBytes();

    var codec = (MessageCodec)(Message.AttributeMask & attribute);
    switch (codec) {
        case MessageCodec.CodecNone: {
            var value = reader.ReadBytes();
            return ImmutableList<Message>.Empty.Add(new Message(value, attribute, offset, partitionId, messageVersion, key, timestamp));
        }

        case MessageCodec.CodecGzip: {
            // The compressed payload is itself a full message set; unzip it and
            // recursively decode the inner messages.
            var messageLength = reader.ReadInt32();
            var messageStream = new LimitedReadableStream(reader.BaseStream, messageLength);
            using (var gzipReader = new BigEndianBinaryReader(messageStream.Unzip())) {
                return gzipReader.ReadMessages(partitionId);
            }
        }

        default:
            throw new NotSupportedException($"Codec type of {codec} is not supported.");
    }
}
/// <summary>
/// Decode a byte[] that represents a collection of messages.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="partitionId">The partitionId messages are being read from.</param>
/// <returns>Enumerable representing stream of messages decoded from byte[].</returns>
/// <exception cref="BufferUnderRunException">Thrown when the declared message-set or message size exceeds the data actually available.</exception>
public static IImmutableList<Message> ReadMessages(this IKafkaReader reader, int partitionId = 0)
{
    var expectedLength = reader.ReadInt32();
    if (!reader.Available(expectedLength)) {
        throw new BufferUnderRunException($"Message set size of {expectedLength} is not fully available.");
    }

    var messages = ImmutableList<Message>.Empty;
    var finalPosition = reader.Position + expectedLength;
    while (reader.Position < finalPosition) {
        // This checks that we have at least the minimum amount of data to retrieve a header.
        // A partial trailing header is expected at the end of a fetch response, so just stop.
        if (reader.Available(MessageHeaderSize) == false) {
            break;
        }

        var offset = reader.ReadInt64();
        var messageSize = reader.ReadInt32();

        // If the stream does not have enough left in the payload, we got only a partial message.
        // BUGFIX: the original error text reported MessageHeaderSize here, but the
        // failed check is on the declared messageSize.
        if (reader.Available(messageSize) == false) {
            throw new BufferUnderRunException($"Message size of {messageSize} is not fully available.");
        }

        try {
            messages = messages.AddRange(reader.ReadMessage(messageSize, offset, partitionId));
        } catch (EndOfStreamException ex) {
            // The reader ran out of bytes mid-message; surface it as an underrun.
            throw new BufferUnderRunException($"Message size of {messageSize} is not available.", ex);
        }
    }

    return messages;
}
/// <summary>
/// Decode a message from the payload and assign it a given kafka offset.
/// </summary>
/// <param name="reader">The reader positioned at the start of the message body.</param>
/// <param name="messageSize">The size of the message, for Crc Hash calculation.</param>
/// <param name="offset">The offset representing the log entry from kafka of this message.</param>
/// <returns>Enumerable representing stream of messages decoded from byte[].</returns>
/// <remarks>The return type is an Enumerable as the message could be a compressed message set.</remarks>
/// <exception cref="CrcValidationException">Thrown when the transmitted CRC does not match the computed CRC.</exception>
public static IImmutableList<Message> ReadMessage(this IKafkaReader reader, int messageSize, long offset)
{
    // Validate integrity first: the CRC covers everything after the 4-byte crc field.
    var expectedCrc = reader.ReadUInt32();
    var computedCrc = reader.ReadCrc(messageSize - 4);
    if (expectedCrc != computedCrc) {
        throw new CrcValidationException(expectedCrc, computedCrc);
    }

    var version = reader.ReadByte();
    var attribute = reader.ReadByte();

    // v1+ messages carry a unix-epoch-milliseconds timestamp; negative means "none".
    DateTimeOffset? timestamp = null;
    if (version >= 1) {
        var unixMilliseconds = reader.ReadInt64();
        if (unixMilliseconds >= 0) {
            timestamp = DateTimeOffset.FromUnixTimeMilliseconds(unixMilliseconds);
        }
    }

    var key = reader.ReadBytes();
    var value = reader.ReadBytes();

    var codec = (MessageCodec)(Message.CodecMask & attribute);
    if (codec != MessageCodec.None) {
        // Compressed payload: the value is itself an encoded message set.
        var uncompressedBytes = value.ToUncompressed(codec);
        using (var messageSetReader = new KafkaReader(uncompressedBytes)) {
            return messageSetReader.ReadMessages(codec);
        }
    }

    var message = new Message(value, key, attribute, offset, version, timestamp);
    return ImmutableList<Message>.Empty.Add(message);
}