public void FetchFrom(KafkaReader reader) {
    // Wire layout: offset (8 bytes), message size (4 bytes), then the message body.
    Offset = reader.ReadInt64();
    MessageSize = reader.ReadInt32();
    var message = new MessageSetItem();
    message.FetchFrom(reader);
    Message = message;
}
public void FetchFrom(KafkaReader reader) {
    // Deserialize in wire order; each read advances the reader.
    var partition = reader.ReadInt32();
    var offset = reader.ReadInt64();
    var metadata = reader.ReadString();
    var errorCode = (ErrorCode)reader.ReadInt16();

    Partition = partition;
    Offset = offset;
    Metadata = metadata;
    ErrorCode = errorCode;
}
protected override void DeserializeContent(KafkaReader reader) {
    // Fetch request body: replica id, max wait, min bytes, then the topic/partition list.
    ReplicaId = reader.ReadInt32();
    MaxWaitTime = reader.ReadInt32();
    MinBytes = reader.ReadInt32();
    TopicPartitions = reader.ReadArray<FetchRequestTopicPartition>();
}
protected override void DeserializeContent(KafkaReader reader) {
    // SyncGroup request body, read in wire order: group id, generation,
    // member id, then the per-member assignment list.
    GroupId = reader.ReadString();
    GenerationId = reader.ReadInt32();
    MemberId = reader.ReadString();
    GroupAssignments = reader.ReadArray<SyncGroupGroupAssignment>();
}
public void FetchFrom(KafkaReader reader) {
    Partition = reader.ReadInt32();
    // The set is constructed with its declared byte length so it knows
    // where to stop reading.
    var setSize = reader.ReadInt32();
    MessageSetSize = setSize;
    var set = new MessageSet(setSize);
    set.FetchFrom(reader);
    MessageSet = set;
}
protected override void DeserializeContent(KafkaReader reader) {
    // OffsetCommit (v1) body: group, generation, consumer id, then partitions.
    ConsumerGroup = reader.ReadString();
    ConsumerGroupGenerationId = reader.ReadInt32();
    ConsumerId = reader.ReadString();
    TopicPartitions = reader.ReadArray<OffsetCommitRequestTopicPartitionV1>();
}
// Expands compressed wrapper messages into the plain messages they contain.
// None: passed through unchanged. Gzip: the value is itself a serialized
// message set, which is inflated and its items flattened into the output.
// NOTE(review): Snappy entries are yielded as-is, still carrying their
// compressed payload — this looks unimplemented rather than intentional;
// confirm callers can cope, or route through a Snappy decompressor the way
// the Gzip branch does.
private IEnumerable <MessageSetDetail> Decompress(IEnumerable <MessageSetDetail> sets) {
    foreach (var item in sets) {
        if (item.Message.Attributes == MessageCodec.None) {
            yield return(item);
        }
        else if (item.Message.Attributes == MessageCodec.Snappy) {
            yield return(item);
        }
        else if (item.Message.Attributes == MessageCodec.Gzip) {
            var buffer = Compress.GZip.Decompress(item.Message.Value);
            using (var stream = new MemoryStream(buffer))
            using (var reader = new KafkaReader(stream)) {
                // The inflated buffer's length bounds the inner set's parse.
                var set = new GZipMessageSet(buffer.Length);
                set.FetchFrom(reader);
                foreach (var item2 in set.Items) {
                    yield return(item2);
                }
            }
        }
        else {
            // Any other codec bit is unsupported here.
            throw new NotImplementedException();
        }
    }
}
// Reads messages until the declared byte budget (_messageSetSize) is consumed.
// The broker may truncate the final message at the response boundary, so a
// partial trailing message is detected via TryFetchFrom and the leftover
// bytes are dropped to keep the reader aligned.
public void FetchFrom(KafkaReader reader) {
    if (_messageSetSize == 0) {
        Items = new MessageSetDetail[0];
        return;
    }
    // Byte accounting is done against the reader's running position.
    var previousPosition = reader.PositionProceeded;
    var items = new List <MessageSetDetail>(32);
    while (reader.PositionProceeded - previousPosition < _messageSetSize) {
        //var item = new MessageSetDetail();
        //item.FetchFrom(reader);
        // Remaining budget for the next entry; TryFetchFrom bails out when
        // the entry would overrun it (i.e. it was truncated).
        var maxBytes = _messageSetSize - (Int32)(reader.PositionProceeded - previousPosition);
        MessageSetDetail item;
        if (!MessageSetDetail.TryFetchFrom(reader, maxBytes, out item)) {
            break;
        }
        items.Add(item);
    }
    // Skip whatever is left of the truncated trailing message so subsequent
    // fields in the response deserialize from the right offset.
    var restBytes = _messageSetSize - (Int32)(reader.PositionProceeded - previousPosition);
    if (restBytes > 0) {
        reader.DropBytes(restBytes);
    }
    Items = Decompress(items).ToArray();
}
public void FetchFrom(KafkaReader reader) {
    // Wire order: partition (4), offset (8), timestamp (8), metadata (string).
    Partition = reader.ReadInt32();
    Offset = reader.ReadInt64();
    TimeStamp = reader.ReadInt64();
    Metadata = reader.ReadString();
}
protected override void DeserializeContent(KafkaReader reader) {
    // Coordinator lookup result: error code then the coordinator's identity.
    var errorCode = (ErrorCode)reader.ReadInt16();
    var coordinatorId = reader.ReadInt32();
    var host = reader.ReadString();
    var port = reader.ReadInt32();

    ErrorCode = errorCode;
    CoordinatorId = coordinatorId;
    CoordinatorHost = host;
    CoordinatorPort = port;
}
public string Get() {
    // Bind a reader to the configured consumer and topic, then pull one message.
    var kafkaReader = new KafkaReader(_consumerconf, topic);
    return kafkaReader.Reader();
}
public void WhenMessageIsExactlyTheSizeOfBufferThenMessageIsDecoded() {
    // arrange: frame a single message exactly filling the length-marked region
    var payload = new ArraySegment <byte>(new byte[] { 1, 2, 3, 4 });
    using (var writer = new KafkaWriter()) {
        writer.Write(0L);
        using (writer.MarkForLength()) {
            new Message(payload, new ArraySegment <byte>(new byte[] { 0 }), 0, version: 0).WriteTo(writer);
        }
        var encoded = writer.ToSegment();

        // act
        using (var reader = new KafkaReader(encoded)) {
            var decoded = reader.ReadMessages(0);

            // assert: the payload round-trips byte-for-byte
            CollectionAssert.AreEqual(new byte[] { 1, 2, 3, 4 }, decoded.First().Value);
        }
    }
}
public void Deserialize(Stream stream) {
    // Response header (size, correlation id) followed by the
    // message-specific payload handled by the subclass.
    using (var reader = new KafkaReader(stream)) {
        var size = reader.ReadInt32();
        var correlationId = reader.ReadInt32();
        Size = size;
        CorrelationId = correlationId;
        DeserializeContent(reader);
    }
}
protected override void DeserializeContent(KafkaReader reader) {
    // JoinGroup request body, read in wire order.
    GroupId = reader.ReadString();
    SessionTimeout = reader.ReadInt32();
    MemberId = reader.ReadString();
    ProtocolType = reader.ReadString();
    GroupProtocols = reader.ReadArray<JoinGroupRequestGroupProtocol>();
}
public void FetchFrom(KafkaReader reader) {
    // Message layout: crc(4) magic(1) attributes(1) key(4+n) value(4+n);
    // key/value carry a 4-byte length prefix, payload bytes only when non-null.
    Crc = reader.ReadInt32();
    MagicByte = reader.ReadByte();
    Attributes = (MessageCodec)reader.ReadByte();
    Key = reader.ReadBytes();
    Value = reader.ReadBytes();
}
public void FetchFrom(KafkaReader reader) {
    // Partition metadata: error, id, leader, then replica and ISR broker lists.
    PartitionErrorCode = (ErrorCode)reader.ReadInt16();
    PartitionId = reader.ReadInt32();
    Leader = reader.ReadInt32();
    Replicas = reader.ReadInt32Array();
    Isr = reader.ReadInt32Array();
}
public void FetchFrom(KafkaReader reader) {
    // DescribeGroups entry: error, group identity and state, then its members.
    ErrorCode = (ErrorCode)reader.ReadInt16();
    GroupId = reader.ReadString();
    State = reader.ReadString();
    ProtocolType = reader.ReadString();
    Protocol = reader.ReadString();
    Members = reader.ReadArray<DescribeGroupsResponseMember>();
}
protected override void DeserializeContent(KafkaReader reader) {
    // JoinGroup response: error, generation, chosen protocol,
    // leader/member ids, then the member list.
    ErrorCode = (ErrorCode)reader.ReadInt16();
    GenerationId = reader.ReadInt32();
    GroupProtocol = reader.ReadString();
    LeaderId = reader.ReadString();
    MemberId = reader.ReadString();
    Members = reader.ReadArray<JoinGroupResponseMember>();
}
public void FetchFrom(KafkaReader reader) {
    Partition = reader.ReadInt32();
    ErrorCode = (ErrorCode)reader.ReadInt16();
    HighwaterMarkOffset = reader.ReadInt64();
    // A single entry occupies at least 26 bytes on the wire:
    // offset 8 + size 4 + (crc 4 + magic 1 + attrs 1 + key 4 + value 4)
    // with empty key/value payloads; two entries need at least 52.
    var setSize = reader.ReadInt32();
    MessageSetSize = setSize;
    var set = new MessageSet(setSize);
    set.FetchFrom(reader);
    MessageSet = set;
}
public void FetchFrom(KafkaReader reader) {
    MemberId = reader.ReadString();
    ClientId = reader.ReadString();
    ClientHost = reader.ReadString();
    // Metadata and assignment are parsed as nested structures rather than
    // kept as raw byte blobs.
    var metadata = new JoinGroupMemberMetadata();
    metadata.FetchFrom(reader);
    MemberMetadata = metadata;
    var assignment = new SyncGroupMemberAssignment();
    assignment.FetchFrom(reader);
    MemberAssignment = assignment;
}
protected override void DeserializeContent(KafkaReader reader) {
    // Possible error codes: GROUP_COORDINATOR_NOT_AVAILABLE (15),
    // NOT_COORDINATOR_FOR_GROUP (16), ILLEGAL_GENERATION (22),
    // UNKNOWN_MEMBER_ID (25), REBALANCE_IN_PROGRESS (27),
    // GROUP_AUTHORIZATION_FAILED (30).
    ErrorCode = (ErrorCode)reader.ReadInt16();
    var assignment = new SyncGroupMemberAssignment();
    assignment.FetchFrom(reader);
    MemberAssignment = assignment;
}
public void DecodeMessageSetShouldHandleResponseWithMaxBufferSizeHit() {
    // The fixture ends with a truncated message; decoding must stop cleanly
    // and still return every complete message before the cut-off.
    using (var reader = new KafkaReader(MessageHelper.FetchResponseMaxBytesOverflow)) {
        var messages = reader.ReadMessages(0);

        Assert.That(messages.First().Value.ToUtf8String(), Is.EqualTo("test"));
        Assert.That(messages.Count, Is.EqualTo(529));
    }
}
public void Int64Tests(long expectedValue, byte[] givenBytes) {
    // The value must decode identically regardless of where it sits in the buffer.
    for (var offset = 0; offset <= 8; offset++) {
        // arrange
        var reader = new KafkaReader(OffsetBytes(givenBytes, offset));

        // act
        var decoded = reader.ReadInt64();

        // assert
        Assert.That(decoded, Is.EqualTo(expectedValue));
    }
}
public void Deserialize(Stream stream) {
    // Request header: size, api key, api version, correlation id, client id,
    // then the message-specific payload handled by the subclass.
    using (var reader = new KafkaReader(stream)) {
        Size = reader.ReadInt32();
        // Guard: the api key on the wire must match this request type.
        var wireApiKey = (ApiKey)reader.ReadInt16();
        if (wireApiKey != ApiKey) {
            throw new InvalidOperationException("Request type definition error");
        }
        ApiVersion = reader.ReadInt16();
        CorrelationId = reader.ReadInt32();
        ClientId = reader.ReadString();
        DeserializeContent(reader);
    }
}
public void DecodeMessageShouldThrowWhenCrcFails() {
    var message = new Message(value: "kafka test message.", key: "test");
    using (var writer = new KafkaWriter()) {
        message.WriteTo(writer);
        var encoded = writer.ToSegment(false);
        // Corrupt the first encoded byte so CRC validation must fail.
        encoded.Array[encoded.Offset] += 1;
        using (var reader = new KafkaReader(encoded)) {
            Assert.Throws <CrcValidationException>(() => reader.ReadMessage(encoded.Count, 0).First());
        }
    }
}
// Reads one response frame from the stream and dispatches to the concrete
// Response type derived from the request's api key (XxxRequest -> XxxResponse).
// Api keys below 10 map to Chuye.Kafka.Protocol, the group-management ones
// to Chuye.Kafka.Protocol.Management.
// Fixes: misspelled local ("fullTypeNaem") and an unguarded Type.GetType —
// an unknown api key previously surfaced as an opaque ArgumentNullException
// from Activator.CreateInstance.
public static Response DeserializeFrom(Stream stream, ApiKey apiKey) {
    using (var reader = new KafkaReader(stream)) {
        var size = reader.ReadInt32();
        var typeName = apiKey.ToString().Replace("Request", "Response");
        var fullTypeName = (Int32)apiKey < 10
            ? "Chuye.Kafka.Protocol." + typeName
            : "Chuye.Kafka.Protocol.Management." + typeName;
        var type = Type.GetType(fullTypeName);
        if (type == null) {
            // Fail with a diagnosable message instead of a downstream null error.
            throw new InvalidOperationException("Response type not found: " + fullTypeName);
        }
        var instance = (Response)Activator.CreateInstance(type);
        instance.Size = size;
        instance.CorrelationId = reader.ReadInt32();
        instance.DeserializeContent(reader);
        return(instance);
    }
}
public void EnsureMessageEncodeAndDecodeAreCompatible(string key, string value) {
    // Round-trip: encode a message, decode it, and compare key and value.
    var original = new Message(key: key, value: value);
    using (var writer = new KafkaWriter()) {
        original.WriteTo(writer);
        var encoded = writer.ToSegment(false);
        using (var reader = new KafkaReader(encoded)) {
            var decoded = reader.ReadMessage(encoded.Count, 0).First();

            Assert.That(original.Key, Is.EqualTo(decoded.Key));
            Assert.That(original.Value, Is.EqualTo(decoded.Value));
        }
    }
}
private static IKafkaReader ReadHeader(ArraySegment <byte> data, out ApiKey apiKey, out IRequestContext context) {
    // Parses the request header; on any failure the reader is disposed and
    // null is returned, with apiKey zeroed and context null for the caller.
    var reader = new KafkaReader(data);
    try {
        apiKey = (ApiKey)reader.ReadInt16();
        var version = reader.ReadInt16();
        var correlationId = reader.ReadInt32();
        var clientId = reader.ReadString();
        context = new RequestContext(correlationId, version, clientId);
        return reader;
    } catch {
        apiKey = 0;
        context = null;
        reader.Dispose();
        return null;
    }
}
// Attempts to read one message-set entry; returns false (item = null) when
// the remaining budget (maxBytes) cannot hold a complete entry — i.e. the
// broker truncated the trailing message.
// NOTE(review): when false is returned after the header checks, the 12
// header bytes (offset + size) have already been consumed from the reader;
// callers are expected to drop the leftover bytes afterwards (see the
// message-set FetchFrom that tracks PositionProceeded).
public static Boolean TryFetchFrom(KafkaReader reader, Int32 maxBytes, out MessageSetDetail item) {
    // Too small to hold even the 8-byte offset + 4-byte size header.
    if (maxBytes < 12) {
        item = null;
        return(false);
    }
    item = new MessageSetDetail();
    item.Offset = reader.ReadInt64(); //move 8
    item.MessageSize = reader.ReadInt32(); //move 4
    // Declared body length exceeds what's left: truncated trailing message.
    if (item.MessageSize > maxBytes - 12) {
        item = null;
        return(false);
    }
    item.Message = new MessageSetItem();
    item.Message.FetchFrom(reader);
    return(true);
}
public void FetchFrom(KafkaReader reader) {
    // The protocol metadata arrives as a length-prefixed byte blob which is
    // itself a serialized structure: version, topic list, user data.
    var protocolMetadata = reader.ReadBytes();
    if (protocolMetadata == null) {
        return;
    }
    using (var stream = new MemoryStream(protocolMetadata))
    using (var innerReader = new KafkaReader(stream)) {
        Version = innerReader.ReadInt16();
        Topics = innerReader.ReadStrings();
        UserData = innerReader.ReadBytes();
    }
}