/// <summary>
/// Decode messages from a payload and assign it a given kafka offset.
/// </summary>
/// <param name="offset">The offset representing the log entry from kafka of this message.</param>
/// <param name="payload">The byte[] encode as a message from kafka.</param>
/// <returns>Enumerable representing stream of messages decoded from byte[].</returns>
/// <exception cref="FailCrcCheckException">Thrown immediately when the payload fails CRC validation.</exception>
/// <remarks>The return type is an Enumerable as the message could be a compressed message set.</remarks>
public static IEnumerable<Message> DecodeMessage(long offset, byte[] payload)
{
    // Validate eagerly. The decoding core below is an iterator method, and iterator
    // bodies do not execute until first enumeration — if the CRC check lived inside
    // it, a corrupt payload would not throw until the caller started iterating.
    var crc = payload.Take(4);
    var stream = new ReadByteStream(payload.Skip(4));

    if (crc.SequenceEqual(Crc32.ComputeHash(stream.Payload)) == false)
        throw new FailCrcCheckException("Payload did not match CRC validation.");

    return DecodeValidatedMessage(offset, stream);
}

/// <summary>
/// Iterator core of <see cref="DecodeMessage"/>; assumes the CRC has already been validated
/// and the stream is positioned just past the 4-byte CRC header.
/// </summary>
private static IEnumerable<Message> DecodeValidatedMessage(long offset, ReadByteStream stream)
{
    // Wire order: magic byte, attribute byte, int32-prefixed key, then the value.
    var message = new Message
    {
        Meta = new MessageMetadata { Offset = offset },
        MagicNumber = stream.ReadByte(),
        Attribute = stream.ReadByte(),
        Key = stream.ReadIntString()
    };

    // The low bits of the attribute byte select the compression codec.
    var codec = (MessageCodec)(ProtocolConstants.AttributeCodeMask & message.Attribute);
    switch (codec)
    {
        case MessageCodec.CodecNone:
            message.Value = stream.ReadIntString();
            yield return message;
            break;

        case MessageCodec.CodecGzip:
            // A gzip message wraps an entire compressed message set: unzip and recurse.
            var gZipData = stream.ReadIntPrefixedBytes();
            foreach (var m in DecodeMessageSet(Compression.Unzip(gZipData)))
            {
                yield return m;
            }
            break;

        default:
            throw new NotSupportedException(string.Format("Codec type of {0} is not supported.", codec));
    }
}
/// <summary>
/// Deserialize a <see cref="Broker"/> from the given byte stream.
/// </summary>
/// <param name="stream">Stream positioned at the start of a broker entry.</param>
/// <returns>The decoded broker.</returns>
public static Broker FromStream(ReadByteStream stream)
{
    // Wire order: broker id (int32), host (int16-prefixed string), port (int32).
    var broker = new Broker();
    broker.BrokerId = stream.ReadInt();
    broker.Host = stream.ReadInt16String();
    broker.Port = stream.ReadInt();
    return broker;
}
/// <summary>
/// Deserialize a <see cref="Partition"/> from the given byte stream.
/// </summary>
/// <param name="stream">Stream positioned at the start of a partition entry.</param>
/// <returns>The decoded partition, including its replica and in-sync-replica lists.</returns>
public static Partition FromStream(ReadByteStream stream)
{
    // Wire order: error code (int16), partition id (int32), leader id (int32),
    // then a counted list of replica ids followed by a counted list of ISR ids.
    var partition = new Partition
    {
        ErrorCode = stream.ReadInt16(),
        PartitionId = stream.ReadInt(),
        LeaderId = stream.ReadInt(),
        Replicas = new List<int>(),
        Isrs = new List<int>()
    };

    for (var remaining = stream.ReadInt(); remaining > 0; remaining--)
    {
        partition.Replicas.Add(stream.ReadInt());
    }

    for (var remaining = stream.ReadInt(); remaining > 0; remaining--)
    {
        partition.Isrs.Add(stream.ReadInt());
    }

    return partition;
}
/// <summary>
/// Decode a byte[] that represents a collection of messages.
/// </summary>
/// <param name="messageSet">The byte[] encode as a message set from kafka.</param>
/// <returns>Enumerable representing stream of messages decoded from byte[]</returns>
/// <exception cref="InsufficientDataException">Thrown when a message payload is shorter than its declared size.</exception>
public static IEnumerable<Message> DecodeMessageSet(byte[] messageSet)
{
    var stream = new ReadByteStream(messageSet);

    while (stream.HasData)
    {
        // Each entry is: offset (int64), size (int32), then `size` bytes of message.
        var offset = stream.ReadLong();
        var size = stream.ReadInt();
        var payload = stream.ReadBytesFromStream(size);

        // The broker may truncate the final message at the fetch-size boundary.
        // Detect that here rather than letting DecodeMessage surface it as a
        // confusing CRC failure on a partial payload.
        if (payload.Length < size)
            throw new InsufficientDataException(payload.Length, size);

        foreach (var message in DecodeMessage(offset, payload))
        {
            yield return message;
        }
    }
}
/// <summary>
/// Deserialize a <see cref="Topic"/> from the given byte stream.
/// </summary>
/// <param name="stream">Stream positioned at the start of a topic entry.</param>
/// <returns>The decoded topic with all of its partitions.</returns>
public static Topic FromStream(ReadByteStream stream)
{
    // Wire order: error code (int16), name (int16-prefixed string),
    // then a counted list of partition entries.
    var topic = new Topic();
    topic.ErrorCode = stream.ReadInt16();
    topic.Name = stream.ReadInt16String();
    topic.Partitions = new List<Partition>();

    var partitionCount = stream.ReadInt();
    for (var i = 0; i < partitionCount; i++)
    {
        topic.Partitions.Add(Partition.FromStream(stream));
    }

    return topic;
}
/// <summary>
/// Decode a byte[] that represents a collection of messages.
/// </summary>
/// <param name="messageSet">The byte[] encode as a message set from kafka.</param>
/// <returns>Enumerable representing stream of messages decoded from byte[]</returns>
/// <exception cref="InsufficientDataException">Thrown when a message payload is shorter than its declared size.</exception>
public static IEnumerable<Message> DecodeMessageSet(byte[] messageSet)
{
    var reader = new ReadByteStream(messageSet);

    while (reader.HasData)
    {
        // Each entry is: offset (int64), size (int32), then `size` bytes of message.
        var entryOffset = reader.ReadLong();
        var declaredSize = reader.ReadInt();
        var messageBytes = reader.ReadBytesFromStream(declaredSize);

        // A truncated final entry is reported explicitly rather than being
        // passed on to the message decoder as a partial payload.
        if (messageBytes.Length < declaredSize)
        {
            throw new InsufficientDataException(messageBytes.Length, declaredSize);
        }

        foreach (var decoded in DecodeMessage(entryOffset, messageBytes))
        {
            yield return decoded;
        }
    }
}