/// <summary>
/// Writes this request's fields to <paramref name="writer"/> in wire order:
/// group id, generation id, member id, then the group assignments.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(GroupId);
    writer.Write(GenerationId);
    writer.Write(MemberId);
    writer.Write(GroupAssignments);
}
/// <summary>
/// Writes this request's fields to <paramref name="writer"/> in wire order:
/// replica id, max wait time, min bytes, then the topic partitions.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(ReplicaId);
    writer.Write(MaxWaitTime);
    writer.Write(MinBytes);
    writer.Write(TopicPartitions);
}
/// <summary>
/// Writes this request's fields to <paramref name="writer"/> in wire order:
/// consumer group, generation id, consumer id, then the topic partitions.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(ConsumerGroup);
    writer.Write(ConsumerGroupGenerationId);
    writer.Write(ConsumerId);
    writer.Write(TopicPartitions);
}
/// <summary>
/// Writes the partition, offset, timestamp and metadata to
/// <paramref name="writer"/> in that order.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    writer.Write(Offset);
    writer.Write(TimeStamp);
    writer.Write(Metadata);
}
/// <summary>
/// Writes this response's fields to <paramref name="writer"/>: the error
/// code (as a 16-bit value), then the coordinator id, host and port.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    // The error code enum is serialized as INT16 on the wire.
    writer.Write((Int16)ErrorCode);
    writer.Write(CoordinatorId);
    writer.Write(CoordinatorHost);
    writer.Write(CoordinatorPort);
}
/// <summary>
/// Writes the partition, offset, metadata and error code (as a 16-bit
/// value) to <paramref name="writer"/> in that order.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    writer.Write(Offset);
    writer.Write(Metadata);
    writer.Write((Int16)ErrorCode);
}
/// <summary>
/// Writes the partition metadata to <paramref name="writer"/>: error code
/// (as a 16-bit value), partition id, leader, replicas and ISR list.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write((Int16)PartitionErrorCode);
    writer.Write(PartitionId);
    writer.Write(Leader);
    writer.Write(Replicas);
    writer.Write(Isr);
}
/// <summary>
/// Writes this request's fields to <paramref name="writer"/> in wire order:
/// group id, session timeout, member id, protocol type, then the protocols.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(GroupId);
    writer.Write(SessionTimeout);
    writer.Write(MemberId);
    writer.Write(ProtocolType);
    writer.Write(GroupProtocols);
}
/// <summary>
/// Writes this partition's fetch result: partition, error code (16-bit),
/// high-water-mark offset, message set size, then the message set itself.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    writer.Write((Int16)ErrorCode);
    writer.Write(HighwaterMarkOffset);
    writer.Write(MessageSetSize);
    // The message set serializes itself after its size prefix.
    MessageSet.SaveTo(writer);
}
/// <summary>
/// Serializes the message body (magic byte, attributes, key, value).
/// As a side effect, <c>Crc</c> is updated from the CRC32 writer, which
/// tracks the bytes written after <c>MarkAsStart()</c>.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    var crc32 = new KafkaCrc32Writer(writer);
    // Start tracking before any payload byte so the CRC covers them all.
    crc32.MarkAsStart();
    writer.Write(MagicByte);
    writer.Write((Byte)Attributes);
    writer.Write(Key);
    writer.Write(Value);
    Crc = crc32.Caculate();
}
/// <summary>
/// Writes the member id followed by the member assignment; a missing
/// assignment is encoded as a null byte array.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(MemberId);
    if (MemberAssignment == null)
    {
        writer.Write((Byte[])null);
    }
    else
    {
        MemberAssignment.SaveTo(writer);
    }
}
/// <summary>
/// Writes the member id followed by the member metadata; missing
/// metadata is encoded as a null byte array.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(MemberId);
    if (MemberMetadata == null)
    {
        writer.Write((Byte[])null);
    }
    else
    {
        MemberMetadata.SaveTo(writer);
    }
}
/// <summary>
/// Serializes the full request to <paramref name="stream"/>: the common
/// header (api key as INT16, api version, correlation id, client id)
/// followed by the subclass body via <c>SerializeContent</c>.
/// As a side effect, <c>Size</c> is updated from the length writer.
/// </summary>
public void Serialize(Stream stream)
{
    using (var writer = new KafkaWriter(stream))
    {
        var sizer = new KafkaLengthWriter(writer);
        // Mark before the header so the length covers the whole request.
        sizer.MarkAsStart();
        writer.Write((Int16)ApiKey);
        writer.Write(ApiVersion);
        writer.Write(CorrelationId);
        writer.Write(ClientId);
        SerializeContent(writer);
        Size = sizer.Caculate();
    }
}
public void WhenMessageIsExactlyTheSizeOfBufferThenMessageIsDecoded()
{
    // arrange: write a single message whose payload fills the buffer exactly
    var payload = new ArraySegment<byte>(new byte[] { 1, 2, 3, 4 });
    using (var writer = new KafkaWriter())
    {
        writer.Write(0L);
        using (writer.MarkForLength())
        {
            new Message(payload, new ArraySegment<byte>(new byte[] { 0 }), 0, version: 0).WriteTo(writer);
        }
        var encoded = writer.ToSegment();

        // act
        using (var reader = new KafkaReader(encoded))
        {
            var decoded = reader.ReadMessages(0);
            var actualPayload = decoded.First().Value;

            // assert
            var expectedPayload = new byte[] { 1, 2, 3, 4 };
            CollectionAssert.AreEqual(expectedPayload, actualPayload);
        }
    }
}
/// <summary>
/// Encodes a response for the wire: the response header (correlation id)
/// followed by the type-specific body. Each <c>response as X</c> cast is
/// null for every type but the actual one, so exactly one
/// <c>TryEncodeResponse</c> overload performs the encoding.
/// </summary>
public static ArraySegment<byte> EncodeResponseBytes<T>(IRequestContext context, T response) where T : IResponse
{
    using (var writer = new KafkaWriter())
    {
        // From http://kafka.apache.org/protocol.html#protocol_messages
        //
        // Response Header => correlation_id
        //   correlation_id => INT32 -- The user-supplied value passed in with the request
        writer.Write(context.CorrelationId);

        // ReSharper disable once UnusedVariable
        var handled =
            TryEncodeResponse(writer, context, response as ProduceResponse) ||
            TryEncodeResponse(writer, context, response as FetchResponse) ||
            TryEncodeResponse(writer, context, response as OffsetsResponse) ||
            TryEncodeResponse(writer, context, response as MetadataResponse) ||
            TryEncodeResponse(writer, context, response as OffsetCommitResponse) ||
            TryEncodeResponse(writer, context, response as OffsetFetchResponse) ||
            TryEncodeResponse(writer, context, response as GroupCoordinatorResponse) ||
            TryEncodeResponse(writer, context, response as JoinGroupResponse) ||
            TryEncodeResponse(writer, context, response as HeartbeatResponse) ||
            TryEncodeResponse(writer, context, response as LeaveGroupResponse) ||
            TryEncodeResponse(writer, context, response as SyncGroupResponse) ||
            TryEncodeResponse(writer, context, response as DescribeGroupsResponse) ||
            TryEncodeResponse(writer, context, response as ListGroupsResponse) ||
            TryEncodeResponse(writer, context, response as SaslHandshakeResponse) ||
            TryEncodeResponse(writer, context, response as ApiVersionsResponse) ||
            TryEncodeResponse(writer, context, response as CreateTopicsResponse) ||
            TryEncodeResponse(writer, context, response as DeleteTopicsResponse);

        return writer.ToSegment();
    }
}
/// <summary>
/// Serializes the protocol metadata (version, topics, user data) into a
/// temporary buffer first, so the outer <c>writer.Write(Byte[])</c> call
/// emits it as a single length-prefixed byte array, as the wire format
/// requires.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    using (var stream = new MemoryStream(4096))
    {
        // Wrap the inner writer in a using so it is disposed (and its
        // output flushed) even if one of the Write calls throws.
        using (var innerWriter = new KafkaWriter(stream))
        {
            innerWriter.Write(Version);
            innerWriter.Write(Topics);
            innerWriter.Write(UserData);
        }
        // MemoryStream.ToArray ignores Position, so no Seek is needed,
        // and it remains valid after the inner writer is disposed.
        writer.Write(stream.ToArray());
    }
}
/// <summary>
/// Serializes the member assignment (version, partition assignments,
/// user data) into a temporary buffer first, so the outer
/// <c>writer.Write(Byte[])</c> call emits it as a single length-prefixed
/// byte array, as the wire format requires.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    using (var stream = new MemoryStream(4096))
    {
        // Wrap the inner writer in a using so it is disposed (and its
        // output flushed) even if one of the Write calls throws.
        using (var innerWriter = new KafkaWriter(stream))
        {
            innerWriter.Write(Version);
            innerWriter.Write(PartitionAssignments);
            innerWriter.Write(UserData);
        }
        // MemoryStream.ToArray ignores Position, so no Seek is needed,
        // and it remains valid after the inner writer is disposed.
        writer.Write(stream.ToArray());
    }
}
/// <summary>
/// Writes the partition followed by its message set. The set's byte size
/// is not known up front; the length writer tracks it, and
/// <c>MessageSetSize</c> is updated as a side effect.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    var sizer = new KafkaLengthWriter(writer);
    sizer.MarkAsStart();
    MessageSet.SaveTo(writer);
    MessageSetSize = sizer.Caculate();
}
/// <summary>
/// Writes the offset followed by the message. The message's byte size is
/// not known up front; the length writer tracks it, and
/// <c>MessageSize</c> is updated as a side effect.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Offset);
    var sizer = new KafkaLengthWriter(writer);
    sizer.MarkAsStart();
    Message.SaveTo(writer);
    MessageSize = sizer.Caculate();
}
public void Int64Tests(long number, byte[] expectedBytes)
{
    // arrange: KafkaWriter is IDisposable, so dispose it via using as the
    // other tests in this file do (the original leaked it).
    using (var writer = new KafkaWriter())
    {
        // act
        writer.Write(number);

        // assert
        var actualBytes = writer.ToSegment(false);
        Assert.That(actualBytes.ToArray(), Is.EqualTo(expectedBytes));
    }
}
public void StringTests(string value, byte[] expectedBytes)
{
    // arrange: KafkaWriter is IDisposable, so dispose it via using as the
    // other tests in this file do (the original leaked it).
    using (var writer = new KafkaWriter())
    {
        // act
        writer.Write(value);

        // assert
        var actualBytes = writer.ToSegment(false);
        Assert.That(actualBytes.ToArray(), Is.EqualTo(expectedBytes));
    }
}
public void DecodeMessageShouldThrowWhenCrcFails()
{
    var message = new Message(value: "kafka test message.", key: "test");
    using (var writer = new KafkaWriter())
    {
        writer.Write(message, false);
        var corrupted = writer.ToBytesNoLength();
        // Flip the first byte so the decoded bytes no longer match the CRC.
        corrupted[0] += 1;
        using (var reader = new BigEndianBinaryReader(corrupted))
        {
            Assert.Throws<CrcValidationException>(() => reader.ReadMessage(corrupted.Length, 0).First());
        }
    }
}
/// <summary>
/// Writes the group description: error code (as a 16-bit value), group id,
/// state, protocol type, protocol, then the members.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write((Int16)ErrorCode);
    writer.Write(GroupId);
    writer.Write(State);
    writer.Write(ProtocolType);
    writer.Write(Protocol);
    writer.Write(Members);
}
/// <summary>
/// Writes this response's fields to <paramref name="writer"/>: error code
/// (as a 16-bit value), generation id, group protocol, leader id,
/// member id, then the members.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write((Int16)ErrorCode);
    writer.Write(GenerationId);
    writer.Write(GroupProtocol);
    writer.Write(LeaderId);
    writer.Write(MemberId);
    writer.Write(Members);
}
/// <summary>
/// Writes the member description: member id, client id, client host, then
/// the metadata and assignment. Metadata and assignment serialize
/// themselves via <c>SaveTo</c>; when missing, each is encoded as a null
/// byte array instead.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(MemberId);
    writer.Write(ClientId);
    writer.Write(ClientHost);
    if (MemberMetadata != null)
    {
        MemberMetadata.SaveTo(writer);
    }
    else
    {
        writer.Write((Byte[])null);
    }
    if (MemberAssignment != null)
    {
        MemberAssignment.SaveTo(writer);
    }
    else
    {
        writer.Write((Byte[])null);
    }
}
public void EnsureMessageEncodeAndDecodeAreCompatible(string key, string value)
{
    // Round-trip a message through encode + decode and compare fields.
    var original = new Message(key: key, value: value);
    using (var writer = new KafkaWriter())
    {
        writer.Write(original, false);
        var encoded = writer.ToBytesNoLength();
        using (var reader = new BigEndianBinaryReader(encoded))
        {
            var decoded = reader.ReadMessage(encoded.Length, 0).First();
            Assert.That(original.Key, Is.EqualTo(decoded.Key));
            Assert.That(original.Value, Is.EqualTo(decoded.Value));
        }
    }
}
public void WhenMessageIsTruncatedThenBufferUnderRunExceptionIsThrown()
{
    // arrange: declare one more byte than the payload actually contains
    var offset = (long)0;
    var message = new byte[] { };
    var messageSize = message.Length + 1;
    using (var writer = new KafkaWriter())
    {
        writer.Write(offset);
        writer.Write(messageSize);
        writer.Write(message);
        var bytes = writer.ToBytes();
        using (var reader = new BigEndianBinaryReader(bytes))
        {
            // act/assert
            Assert.Throws<BufferUnderRunException>(() => reader.ReadMessages());
        }
    }
}
public void WhenMessageIsTruncatedThenBufferUnderRunExceptionIsThrown()
{
    // arrange: declare one more byte than the payload actually contains
    var message = new byte[] { };
    var messageSize = message.Length + 1;
    using (var writer = new KafkaWriter())
    {
        writer.Write(0L);
        writer.Write(messageSize);
        writer.Write(new ArraySegment<byte>(message));
        var segment = writer.ToSegment();
        using (var reader = new KafkaReader(segment))
        {
            // act/assert
            Assert.Throws<BufferUnderRunException>(() => reader.ReadMessages(0));
        }
    }
}
public void EncodeMessageSetEncodesMultipleMessages()
{
    // Expected bytes were generated from the python kafka library.
    var expectedBytes = new byte[]
    {
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 45, 70, 24, 62, 0, 0, 0, 0, 0, 1, 49, 0, 0, 0, 1, 48,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 90, 65, 40, 168, 0, 0, 0, 0, 0, 1, 49, 0, 0, 0, 1, 49,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 195, 72, 121, 18, 0, 0, 0, 0, 0, 1, 49, 0, 0, 0, 1, 50
    };
    var messages = new[]
    {
        new Message("0", "1"),
        new Message("1", "1"),
        new Message("2", "1")
    };
    using (var writer = new KafkaWriter())
    {
        writer.Write(messages, false);
        var actualBytes = writer.ToBytesNoLength();
        Assert.That(expectedBytes, Is.EqualTo(actualBytes));
    }
}
/// <summary>
/// Writes the topic name followed by its details to
/// <paramref name="writer"/>.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(TopicName);
    writer.Write(Details);
}