public void Deserialize(byte[] bytes)
{
    using (var ms = new MemoryStream(bytes))
    {
        var reader = new KafkaBinaryReader(ms);
        this.Version = reader.ReadInt16();

        // Array of [topic => partition ids] assignments.
        var count = reader.ReadInt32();
        this.PartitionAssignmentInfos = new SyncGroupResponsePartitionAssignmentInfo[count];
        for (int i = 0; i < count; i++)
        {
            this.PartitionAssignmentInfos[i] = new SyncGroupResponsePartitionAssignmentInfo();

            // Topic name: int16 length followed by UTF-8 bytes.
            var txtLen = reader.ReadInt16();
            this.PartitionAssignmentInfos[i].Topic = Encoding.UTF8.GetString(reader.ReadBytes(txtLen));

            var size = reader.ReadInt32();
            this.PartitionAssignmentInfos[i].Partitions = new int[size];
            for (int j = 0; j < size; j++)
            {
                this.PartitionAssignmentInfos[i].Partitions[j] = reader.ReadInt32();
            }
        }

        // Trailing user data: int32 length followed by raw bytes; the protocol uses
        // -1 to encode null, which would make ReadBytes throw without this guard.
        var len = reader.ReadInt32();
        this.UserData = len < 0 ? null : reader.ReadBytes(len);
    }
}
public SyncGroupResponseMemberAssignmentInfo ParseMemberAssignment()
{
    // Deserialize this.MemberAssignment into a structured assignment:
    // version, [topic => partition ids] array, then opaque user data.
    var info = new SyncGroupResponseMemberAssignmentInfo();
    using (var ms = new MemoryStream(this.MemberAssignment))
    {
        var reader = new KafkaBinaryReader(ms);
        info.Version = reader.ReadInt16();

        int count = reader.ReadInt32();
        info.PartitionAssignmentInfos = new SyncGroupResponsePartitionAssignmentInfo[count];
        for (int i = 0; i < count; i++)
        {
            info.PartitionAssignmentInfos[i] = new SyncGroupResponsePartitionAssignmentInfo();

            // Topic name: int16 length followed by UTF-8 bytes.
            short txtSize = reader.ReadInt16();
            byte[] txtBytes = reader.ReadBytes(txtSize);
            info.PartitionAssignmentInfos[i].Topic = Encoding.UTF8.GetString(txtBytes);

            int psize = reader.ReadInt32();
            info.PartitionAssignmentInfos[i].Partitions = new int[psize];
            for (int j = 0; j < psize; j++)
            {
                info.PartitionAssignmentInfos[i].Partitions[j] = reader.ReadInt32();
            }
        }

        // Opaque user data: int32 length followed by raw bytes; -1 encodes null.
        int bytesSize = reader.ReadInt32();
        info.UserData = bytesSize < 0 ? null : reader.ReadBytes(bytesSize);
    }
    return info;
}
public static SyncGroupResponse ParseFrom(KafkaBinaryReader reader)
{
    var size = reader.ReadInt32();          // total response size; consumed but unused
    var correlationid = reader.ReadInt32(); // correlation id; consumed but unused
    var error = reader.ReadInt16();
    var count = reader.ReadInt32();
    var data = reader.ReadBytes(count);     // raw member assignment, decoded later by ParseMemberAssignment
    return new SyncGroupResponse(error, data);
}
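// Usage sketch (illustrative, not part of the library): ParseFrom above captures the
// member assignment as raw bytes, and ParseMemberAssignment decodes them on demand.
// This assumes ParseMemberAssignment is an instance method of SyncGroupResponse, as its
// use of this.MemberAssignment suggests; the helper below is hypothetical.
public static void PrintAssignment(KafkaBinaryReader reader)
{
    var response = SyncGroupResponse.ParseFrom(reader);
    var assignment = response.ParseMemberAssignment();
    foreach (var topicInfo in assignment.PartitionAssignmentInfos)
    {
        Console.WriteLine(topicInfo.Topic + " -> [" + string.Join(", ", topicInfo.Partitions) + "]");
    }
}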
public void Deserialize(byte[] bytes)
{
    using (var ms = new MemoryStream(bytes))
    {
        var reader = new KafkaBinaryReader(ms);
        this.Version = reader.ReadInt16();

        // Subscribed topics: int32 count, then one short string per topic.
        var count = reader.ReadInt32();
        var topics = new string[count];
        for (var i = 0; i < count; i++)
        {
            var length = reader.ReadInt16();
            topics[i] = Encoding.UTF8.GetString(reader.ReadBytes(length));
        }
        this.Topics = new List<string>(topics);

        // Opaque user data: int32 length followed by raw bytes; -1 encodes null.
        count = reader.ReadInt32();
        this.UserData = count < 0 ? null : reader.ReadBytes(count);
    }
}
public static string ReadShortString(KafkaBinaryReader reader, string encoding)
{
    var size = reader.ReadInt16();
    if (size < 0)
    {
        return null;
    }
    var bytes = reader.ReadBytes(size);
    Encoding encoder = Encoding.GetEncoding(encoding);
    return encoder.GetString(bytes);
}
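// A minimal, self-contained check of the short-string format handled by ReadShortString
// above: an int16 length followed by encoded bytes, with -1 encoding null. This assumes
// KafkaBinaryReader decodes big-endian network order, per the Kafka wire protocol.
public static void ShortStringDemo()
{
    // Big-endian int16 length 4, then the UTF-8 bytes of "test".
    var wire = new byte[] { 0x00, 0x04, (byte)'t', (byte)'e', (byte)'s', (byte)'t' };
    using (var ms = new MemoryStream(wire))
    {
        var reader = new KafkaBinaryReader(ms);
        Console.WriteLine(ReadShortString(reader, "UTF-8")); // prints "test"
    }
}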
public static DescribeGroupsResponse ParseFrom(KafkaBinaryReader reader)
{
    var size = reader.ReadInt32();
    var correlationid = reader.ReadInt32();

    // One entry per described group.
    var count = reader.ReadInt32();
    var responseInfos = new DescribeGroupsResponseInfo[count];
    for (int i = 0; i < count; i++)
    {
        var error = reader.ReadInt16();
        var groupid = reader.ReadShortString();
        var state = reader.ReadShortString();
        var protocolType = reader.ReadShortString();
        var protocol = reader.ReadShortString();

        // One entry per group member.
        var count2 = reader.ReadInt32();
        var members = new DescribeGroupsResponseMemberInfo[count2];
        for (int j = 0; j < count2; j++)
        {
            var memberid = reader.ReadShortString();
            var clientid = reader.ReadShortString();
            var clienthost = reader.ReadShortString();
            var metadataSize = reader.ReadInt32();
            var metadata = reader.ReadBytes(metadataSize);
            var assignmentSize = reader.ReadInt32();
            var assignment = reader.ReadBytes(assignmentSize);
            members[j] = new DescribeGroupsResponseMemberInfo(memberid, clientid, clienthost, metadata, assignment);
        }
        responseInfos[i] = new DescribeGroupsResponseInfo(error, groupid, state, protocolType, protocol, members);
    }
    return new DescribeGroupsResponse(responseInfos) { CorrelationId = correlationid, Size = size };
}
public static JoinGroupResponse ParseFrom(KafkaBinaryReader reader)
{
    var size = reader.ReadInt32();          // total response size; consumed but unused
    var correlationid = reader.ReadInt32(); // correlation id; consumed but unused
    var error = reader.ReadInt16();
    var generationid = reader.ReadInt32();
    var groupprotocol = reader.ReadShortString();
    var leaderid = reader.ReadShortString();
    var memberid = reader.ReadShortString();

    // The member list is only populated for the group leader; other members
    // receive an empty array.
    var count = reader.ReadInt32();
    var members = new JoinGroupResponseMemberInfo[count];
    for (int i = 0; i < count; i++)
    {
        var id = reader.ReadShortString();
        var bytes = reader.ReadInt32();
        var metadata = reader.ReadBytes(bytes);
        members[i] = new JoinGroupResponseMemberInfo(id, metadata);
    }
    return new JoinGroupResponse(error, generationid, groupprotocol, leaderid, memberid, members);
}
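// Usage sketch (hypothetical accessors): after a join, the leader sees every member and
// its protocol metadata. "Members", "MemberId", and "Metadata" are assumed property
// names mirroring the ParseFrom constructor arguments above; the real names may differ.
public static void DumpJoinResult(KafkaBinaryReader reader)
{
    var response = JoinGroupResponse.ParseFrom(reader);
    foreach (var member in response.Members)
    {
        // Each metadata blob follows the subscription schema decoded by the
        // Deserialize(byte[]) method above (version, topics, user data).
        Console.WriteLine(member.MemberId + ": " + member.Metadata.Length + " metadata bytes");
    }
}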
public static FetchResponse ParseFrom(KafkaBinaryReader reader)
{
    FetchResponse result = null;
    DateTime startUtc = DateTime.UtcNow;
    int size = 0, correlationId = 0, dataCount = 0;
    try
    {
        size = reader.ReadInt32();
        Logger.Debug("FetchResponse.ParseFrom: read size field after " + (DateTime.UtcNow - startUtc).TotalSeconds + " seconds, packet size " + size);

        // Read the whole remaining frame into memory first, then decode from the buffer;
        // this separates the network read from the parsing work.
        startUtc = DateTime.UtcNow;
        byte[] remainingBytes = reader.ReadBytes(size);
        Logger.Debug("FetchResponse.ParseFrom: read remaining bytes after " + (DateTime.UtcNow - startUtc).TotalSeconds + " seconds");

        startUtc = DateTime.UtcNow;
        var dataReader = new KafkaBinaryReader(new MemoryStream(remainingBytes));
        correlationId = dataReader.ReadInt32();
        dataCount = dataReader.ReadInt32();
        var data = new FetchResponseTopicInfo[dataCount];

        // Possible improvement: rather than parsing records here, keep only the raw
        // message-set bytes and let the caller decode them later, which would speed
        // up the fetch response path.
        for (int i = 0; i < dataCount; i++)
        {
            var topic = dataReader.ReadShortString();
            var partitionCount = dataReader.ReadInt32();
            startUtc = DateTime.UtcNow;
            var partitions = new FetchResponsePartitionInfo[partitionCount];
            for (int j = 0; j < partitionCount; j++)
            {
                var partition = dataReader.ReadInt32();
                var error = dataReader.ReadInt16();
                var highWatermark = dataReader.ReadInt64();
                var messageSetSize = dataReader.ReadInt32();
                var messageSetBytes = dataReader.ReadBytes(messageSetSize);
                Logger.Debug("FetchResponse.ParseFrom: topic " + topic + " partition " + partition + " should get records in " + messageSetSize + " bytes, error " + error + " watermark " + highWatermark);
                partitions[j] = new FetchResponsePartitionInfo(partition, error, highWatermark, messageSetBytes);
            }
            Logger.Debug("FetchResponse.ParseFrom: read " + partitionCount + " partitions for topic " + (i + 1) + " of " + dataCount + " in " + (DateTime.UtcNow - startUtc).TotalSeconds + " seconds");
            data[i] = new FetchResponseTopicInfo(topic, partitions);
        }

        result = new FetchResponse(correlationId, data, size);
        Logger.Debug("FetchResponse.ParseFrom: read bytes into structure complete after " + (DateTime.UtcNow - startUtc).TotalSeconds + " seconds");
    }
    catch (OutOfMemoryException mex)
    {
        Logger.Error(string.Format(
            "OOM Error. Data values were: size: {0}, correlationId: {1}, dataCount: {2}.\r\nFull stack of exception: {3}",
            size, correlationId, dataCount, mex.StackTrace));
        throw;
    }
    catch (Exception e)
    {
        Logger.Debug("FetchResponse.ParseFrom: parse response failed\r\n" + e);
        throw;
    }
    return result;
}
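// Consumption sketch (hypothetical accessors): ParseFrom keeps each partition's message
// set as raw bytes, so record decoding can happen later, off the fetch path. "TopicInfos",
// "Topic", "Partition", "Partitions", and "MessageSetBytes" are assumed names mirroring
// the constructor arguments above; the real names may differ.
public static void LogFetchSummary(FetchResponse response)
{
    foreach (var topic in response.TopicInfos)
    {
        foreach (var partition in topic.Partitions)
        {
            Console.WriteLine(topic.Topic + "/" + partition.Partition + ": " +
                partition.MessageSetBytes.Length + " bytes buffered");
        }
    }
}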