/// <summary>
/// Writes the request body in wire order: group id, generation id,
/// member id, then the per-member group assignments.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(GroupId);
    writer.Write(GenerationId);
    writer.Write(MemberId);
    writer.Write(GroupAssignments);
}
/// <summary>
/// Writes one partition entry in wire order: partition id, offset,
/// metadata string, and the error code encoded as an Int16.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    writer.Write(Offset);
    writer.Write(Metadata);
    writer.Write((Int16)ErrorCode);
}
/// <summary>
/// Verifies that several pending reads issued against the socket are queued
/// and that each one completes with exactly one message's worth of bytes
/// once the server sends the combined payload.
/// </summary>
public void ReadShouldStackReadRequestsAndReturnOneAtATime()
{
    var endpoint = Endpoint.Resolve(TestConfig.ServerUri(), TestConfig.InfoLog);
    using (var server = new FakeTcpServer(TestConfig.InfoLog, endpoint.IP.Port)) {
        var items = new[] { "test1", "test2", "test3", "test4" };
        var expectedLength = "test1".Length; // all test strings share this length
        var encoded = new KafkaWriter().Write(items);
        using (var socket = new TcpSocket(endpoint, log: TestConfig.WarnLog)) {
            // queue one read per message BEFORE anything is sent
            var pendingReads = items.Select(x => socket.ReadAsync(x.Length, CancellationToken.None)).ToArray();
            var sendTask = server.SendDataAsync(encoded.ToBytes());
            Task.WaitAll(pendingReads);
            foreach (var read in pendingReads) {
                Assert.That(read.Result.Length, Is.EqualTo(expectedLength));
            }
        }
    }
}
/// <summary>
/// Round-trips a message whose payload exactly fills the buffer and checks
/// that the decoded value bytes match what was written.
/// </summary>
public void WhenMessageIsExactlyTheSizeOfBufferThenMessageIsDecoded()
{
    // arrange: encode offset 0 followed by a length-prefixed message body
    var expectedPayloadBytes = new ArraySegment<byte>(new byte[] { 1, 2, 3, 4 });
    using (var writer = new KafkaWriter()) {
        writer.Write(0L);
        using (writer.MarkForLength()) {
            new Message(expectedPayloadBytes, new ArraySegment<byte>(new byte[] { 0 }), 0, version: 0).WriteTo(writer);
        }
        var encoded = writer.ToSegment();

        // act: decode the buffer back into messages
        using (var reader = new KafkaReader(encoded)) {
            var decoded = reader.ReadMessages(0);
            var actualPayload = decoded.First().Value;

            // assert: the value bytes survived the round trip
            var expectedPayload = new byte[] { 1, 2, 3, 4 };
            CollectionAssert.AreEqual(expectedPayload, actualPayload);
        }
    }
}
/// <summary>
/// Writes the fetch request body in wire order: replica id, max wait time,
/// min bytes, then the topic/partition fetch entries.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(ReplicaId);
    writer.Write(MaxWaitTime);
    writer.Write(MinBytes);
    writer.Write(TopicPartitions);
}
/// <summary>
/// Encodes a response for the fake server: the correlation-id header followed
/// by the type-specific body. Only one of the `as` casts below is non-null for
/// a given response, so the short-circuiting || chain dispatches to exactly
/// one TryEncodeResponse overload.
/// </summary>
public static ArraySegment<byte> EncodeResponseBytes<T>(IRequestContext context, T response) where T : IResponse
{
    using (var writer = new KafkaWriter()) {
        // From http://kafka.apache.org/protocol.html#protocol_messages
        //
        // Response Header => correlation_id
        //   correlation_id => INT32 -- The user-supplied value passed in with the request
        writer.Write(context.CorrelationId);

        // ReSharper disable once UnusedVariable
        var handled =
            TryEncodeResponse(writer, context, response as ProduceResponse) ||
            TryEncodeResponse(writer, context, response as FetchResponse) ||
            TryEncodeResponse(writer, context, response as OffsetsResponse) ||
            TryEncodeResponse(writer, context, response as MetadataResponse) ||
            TryEncodeResponse(writer, context, response as OffsetCommitResponse) ||
            TryEncodeResponse(writer, context, response as OffsetFetchResponse) ||
            TryEncodeResponse(writer, context, response as GroupCoordinatorResponse) ||
            TryEncodeResponse(writer, context, response as JoinGroupResponse) ||
            TryEncodeResponse(writer, context, response as HeartbeatResponse) ||
            TryEncodeResponse(writer, context, response as LeaveGroupResponse) ||
            TryEncodeResponse(writer, context, response as SyncGroupResponse) ||
            TryEncodeResponse(writer, context, response as DescribeGroupsResponse) ||
            TryEncodeResponse(writer, context, response as ListGroupsResponse) ||
            TryEncodeResponse(writer, context, response as SaslHandshakeResponse) ||
            TryEncodeResponse(writer, context, response as ApiVersionsResponse) ||
            TryEncodeResponse(writer, context, response as CreateTopicsResponse) ||
            TryEncodeResponse(writer, context, response as DeleteTopicsResponse);

        return writer.ToSegment();
    }
}
/// <summary>
/// Writes one partition entry in wire order: partition id, offset,
/// timestamp, then the metadata string.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    writer.Write(Offset);
    writer.Write(TimeStamp);
    writer.Write(Metadata);
}
/// <summary>
/// Writes the group coordinator response body: error code (Int16),
/// coordinator id, host, and port.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write((Int16)ErrorCode);
    writer.Write(CoordinatorId);
    writer.Write(CoordinatorHost);
    writer.Write(CoordinatorPort);
}
/// <summary>
/// Writes the offset-commit request header: consumer group, its generation id,
/// the consumer id, then the per-topic partition entries.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(ConsumerGroup);
    writer.Write(ConsumerGroupGenerationId);
    writer.Write(ConsumerId);
    writer.Write(TopicPartitions);
}
/// <summary>
/// Sends two logical messages in a single TCP payload and verifies that two
/// sequential reads each receive their own bytes with nothing lost between them.
/// </summary>
public void ReadShouldNotLoseDataFromStreamOverMultipleReads()
{
    var endpoint = Endpoint.Resolve(TestConfig.ServerUri(), TestConfig.InfoLog);
    using (var server = new FakeTcpServer(TestConfig.InfoLog, endpoint.IP.Port))
    using (var client = new TcpSocket(endpoint, log: TestConfig.InfoLog)) {
        const int firstMessage = 99;
        const string secondMessage = "testmessage";

        var secondBytes = Encoding.UTF8.GetBytes(secondMessage);
        var payload = new KafkaWriter()
            .Write(firstMessage)
            .Write(secondBytes, false);

        // send both messages as one combined payload
        var sendTask = server.SendDataAsync(payload.ToBytesNoLength());

        var firstResponse = client.ReadAsync(4, CancellationToken.None).Result.ToInt32();
        Assert.That(firstResponse, Is.EqualTo(firstMessage));

        var secondResponse = Encoding.ASCII.GetString(client.ReadAsync(secondMessage.Length, CancellationToken.None).Result);
        Assert.That(secondResponse, Is.EqualTo(secondMessage));
    }
}
/// <summary>
/// Writes one fetch-response partition: partition id, error code (Int16),
/// high-water-mark offset, the message set size, then the message set itself.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    writer.Write((Int16)ErrorCode);
    writer.Write(HighwaterMarkOffset);
    writer.Write(MessageSetSize);
    MessageSet.SaveTo(writer);
}
/// <summary>
/// Writes the join-group request body: group id, session timeout, member id,
/// protocol type, then the supported group protocols.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write(GroupId);
    writer.Write(SessionTimeout);
    writer.Write(MemberId);
    writer.Write(ProtocolType);
    writer.Write(GroupProtocols);
}
/// <summary>
/// Writes one metadata partition entry: error code (Int16), partition id,
/// leader broker id, the replica set, and the in-sync replica set.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write((Int16)PartitionErrorCode);
    writer.Write(PartitionId);
    writer.Write(Leader);
    writer.Write(Replicas);
    writer.Write(Isr);
}
/// <summary>
/// Writes the partition id, then the message set preceded by its byte size.
/// The size slot is reserved before the set is written and back-filled (and
/// cached in MessageSetSize) once the content length is known.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Partition);
    var sizeMarker = new KafkaLengthWriter(writer);
    sizeMarker.MarkAsStart();
    MessageSet.SaveTo(writer);
    MessageSetSize = sizeMarker.Caculate();
}
/// <summary>
/// Writes the join-group response body: error code (Int16), generation id,
/// chosen group protocol, leader id, this member's id, and the member list.
/// </summary>
protected override void SerializeContent(KafkaWriter writer)
{
    writer.Write((Int16)ErrorCode);
    writer.Write(GenerationId);
    writer.Write(GroupProtocol);
    writer.Write(LeaderId);
    writer.Write(MemberId);
    writer.Write(Members);
}
/// <summary>
/// Writes one described group: error code (Int16), group id, state,
/// protocol type, protocol, and the member descriptions.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write((Int16)ErrorCode);
    writer.Write(GroupId);
    writer.Write(State);
    writer.Write(ProtocolType);
    writer.Write(Protocol);
    writer.Write(Members);
}
/// <summary>
/// Writes each message in the set back to back. N.B.: unlike other arrays in
/// the protocol, a MessageSet is NOT preceded by an int32 element count, so
/// the items cannot simply be passed to writer.Write(...) as an array.
/// </summary>
public virtual void SaveTo(KafkaWriter writer)
{
    foreach (var message in Items) {
        message.SaveTo(writer);
    }
}
/// <summary>
/// Writes one message-set entry: the offset, then the message preceded by its
/// byte size. The size slot is reserved up front and back-filled (and cached
/// in MessageSize) after the message body has been written.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(Offset);
    var sizeMarker = new KafkaLengthWriter(writer);
    sizeMarker.MarkAsStart();
    Message.SaveTo(writer);
    MessageSize = sizeMarker.Caculate();
}
/// <summary>
/// Serializes the request to the stream as a size-prefixed frame: the size
/// slot is reserved first, then the correlation id and the type-specific
/// content are written, and finally the size is back-filled (and cached in Size).
/// </summary>
public void Serialize(Stream stream)
{
    using (var writer = new KafkaWriter(stream)) {
        var sizeMarker = new KafkaLengthWriter(writer);
        sizeMarker.MarkAsStart();
        writer.Write(CorrelationId);
        SerializeContent(writer);
        Size = sizeMarker.Caculate();
    }
}
/// <summary>
/// Writes the message body (magic byte, attributes, key, value) preceded by a
/// CRC32 over those bytes. The CRC slot is reserved first and back-filled
/// (and cached in Crc) once the body has been written.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    var crcMarker = new KafkaCrc32Writer(writer);
    crcMarker.MarkAsStart();
    writer.Write(MagicByte);
    writer.Write((Byte)Attributes);
    writer.Write(Key);
    writer.Write(Value);
    Crc = crcMarker.Caculate();
}
/// <summary>
/// Verifies that writing a string produces exactly the expected wire bytes.
/// </summary>
public void StringTests(string value, byte[] expectedBytes)
{
    // arrange
    var writer = new KafkaWriter();

    // act
    writer.Write(value);

    // assert
    var actual = writer.ToSegment(false);
    Assert.That(actual.ToArray(), Is.EqualTo(expectedBytes));
}
/// <summary>
/// Writes the member id followed by the member's metadata; a missing metadata
/// block is encoded as a null byte array.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(MemberId);
    if (MemberMetadata == null) {
        writer.Write((Byte[])null);
    } else {
        MemberMetadata.SaveTo(writer);
    }
}
/// <summary>
/// Verifies that writing a 64-bit integer produces exactly the expected wire bytes.
/// </summary>
public void Int64Tests(long number, byte[] expectedBytes)
{
    // arrange
    var writer = new KafkaWriter();

    // act
    writer.Write(number);

    // assert
    var actual = writer.ToSegment(false);
    Assert.That(actual.ToArray(), Is.EqualTo(expectedBytes));
}
/// <summary>
/// Writes the member id followed by the member's assignment; a missing
/// assignment block is encoded as a null byte array.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    writer.Write(MemberId);
    if (MemberAssignment == null) {
        writer.Write((Byte[])null);
    } else {
        MemberAssignment.SaveTo(writer);
    }
}
/// <summary>
/// Smoke-tests writing a single Book to the "topic-book" topic through the
/// generic KafkaWriter.
/// NOTE(review): blocks on .Result and targets a hard-coded broker address,
/// so this appears to require a live broker at 192.168.0.104:9092 -- confirm.
/// </summary>
public void WriteAsyncTest()
{
    var config = new Dictionary<string, object> {
        { "bootstrap.servers", "192.168.0.104:9092" }
    };
    IMessageWriter<Book> writer = new KafkaWriter<Book>(config);
    var book = new Book() { Acedamics = "ymdl", Author = "atul" };
    var result = writer.WriteAsync(book, "topic-book").Result;
}
/// <summary>
/// Corrupts the first encoded byte and verifies that decoding the message
/// then fails CRC validation.
/// </summary>
public void DecodeMessageShouldThrowWhenCrcFails()
{
    var original = new Message(value: "kafka test message.", key: "test");
    using (var writer = new KafkaWriter()) {
        writer.Write(original, false);
        var encoded = writer.ToBytesNoLength();
        encoded[0] += 1; // corrupt the leading byte so the CRC check must fail
        using (var reader = new BigEndianBinaryReader(encoded)) {
            Assert.Throws<CrcValidationException>(() => reader.ReadMessage(encoded.Length, 0).First());
        }
    }
}
/// <summary>
/// Corrupts the first encoded byte of the segment and verifies that decoding
/// the message then fails CRC validation.
/// </summary>
public void DecodeMessageShouldThrowWhenCrcFails()
{
    var original = new Message(value: "kafka test message.", key: "test");
    using (var writer = new KafkaWriter()) {
        original.WriteTo(writer);
        var encoded = writer.ToSegment(false);
        encoded.Array[encoded.Offset] += 1; // corrupt the leading byte so the CRC check must fail
        using (var reader = new KafkaReader(encoded)) {
            Assert.Throws<CrcValidationException>(() => reader.ReadMessage(encoded.Count, 0).First());
        }
    }
}
/// <summary>
/// Round-trips a message through the writer and reader and verifies that the
/// key and value survive encode/decode unchanged.
/// </summary>
public void EnsureMessageEncodeAndDecodeAreCompatible(string key, string value)
{
    var original = new Message(key: key, value: value);
    using (var writer = new KafkaWriter()) {
        writer.Write(original, false);
        var encoded = writer.ToBytesNoLength();
        using (var reader = new BigEndianBinaryReader(encoded)) {
            var decoded = reader.ReadMessage(encoded.Length, 0).First();
            Assert.That(original.Key, Is.EqualTo(decoded.Key));
            Assert.That(original.Value, Is.EqualTo(decoded.Value));
        }
    }
}
/// <summary>
/// Round-trips a message through the writer and segment-based reader and
/// verifies that the key and value survive encode/decode unchanged.
/// </summary>
public void EnsureMessageEncodeAndDecodeAreCompatible(string key, string value)
{
    var original = new Message(key: key, value: value);
    using (var writer = new KafkaWriter()) {
        original.WriteTo(writer);
        var encoded = writer.ToSegment(false);
        using (var reader = new KafkaReader(encoded)) {
            var decoded = reader.ReadMessage(encoded.Count, 0).First();
            Assert.That(original.Key, Is.EqualTo(decoded.Key));
            Assert.That(original.Value, Is.EqualTo(decoded.Value));
        }
    }
}
/// <summary>
/// Serializes this protocol metadata (version, topics, user data) into an
/// intermediate buffer, then writes that buffer through the outer writer as a
/// single length-prefixed byte array, as the protocol encodes metadata as `bytes`.
/// </summary>
public void SaveTo(KafkaWriter writer)
{
    using (var stream = new MemoryStream(4096)) {
        // Encode the fields into the temporary stream first so the outer
        // writer can emit the whole blob as one byte array. The `using`
        // guarantees the inner writer is disposed even if a Write throws
        // (the original leaked it on the exception path).
        using (var metadataWriter = new KafkaWriter(stream)) {
            metadataWriter.Write(Version);
            metadataWriter.Write(Topics);
            metadataWriter.Write(UserData);
        }
        // MemoryStream.ToArray returns the full contents regardless of the
        // current Position, so the original Seek(0) before it was redundant.
        var protocolMetadata = stream.ToArray();
        writer.Write(protocolMetadata);
    }
}