/// <summary>
/// Parses an OffsetFetchResponse from the wire format: total size, correlation id,
/// then per-topic lists of (partition, offset, metadata, error) entries.
/// </summary>
/// <param name="reader">Positioned at the start of an OffsetFetchResponse payload.</param>
/// <returns>The parsed response with per-topic offset info grouped in a dictionary.</returns>
public static OffsetFetchResponse ParseFrom(KafkaBinaryReader reader)
{
    var size = reader.ReadInt32();
    var correlationId = reader.ReadInt32();
    var topicCount = reader.ReadInt32();
    var data = new Dictionary<string, List<OffsetFetchResponseInfo>>();
    for (int i = 0; i < topicCount; i++)
    {
        var topic = reader.ReadShortString();
        var partitionCount = reader.ReadInt32();
        for (int j = 0; j < partitionCount; j++)
        {
            var partition = reader.ReadInt32();
            var offset = reader.ReadInt64();
            var metadata = reader.ReadShortString();
            var error = reader.ReadInt16();

            // Single TryGetValue instead of ContainsKey + Add + indexer
            // (the original performed up to three hash lookups per entry).
            if (!data.TryGetValue(topic, out var infos))
            {
                infos = new List<OffsetFetchResponseInfo>();
                data.Add(topic, infos);
            }

            infos.Add(new OffsetFetchResponseInfo(partition, offset, metadata, error));
        }
    }

    return new OffsetFetchResponse(size, correlationId, data);
}
/// <summary>
/// Reads a single partition-offsets record from the wire format:
/// partition id, error code, then a counted list of 64-bit offsets.
/// </summary>
/// <param name="reader">Positioned at the start of a partition offsets record.</param>
/// <returns>The parsed <c>PartitionOffsetsResponse</c>.</returns>
public static PartitionOffsetsResponse ReadFrom(KafkaBinaryReader reader)
{
    var partitionId = reader.ReadInt32();
    var errorCode = reader.ReadInt16();
    var offsetCount = reader.ReadInt32();

    // Presize to the announced count so the list never reallocates.
    var offsets = new List<long>(offsetCount);
    while (offsets.Count < offsetCount)
    {
        offsets.Add(reader.ReadInt64());
    }

    return new PartitionOffsetsResponse(partitionId, errorCode, offsets);
}
/// <summary>
/// Parses a FetchResponse from the wire: reads the full payload into memory first,
/// then decodes correlation id and, per topic/partition, the error code, high
/// watermark, and the raw message-set bytes (left unparsed here to speed up fetch).
/// </summary>
/// <param name="reader">Positioned at the start of a FetchResponse (size-prefixed).</param>
/// <returns>The parsed response; message sets are kept as raw byte arrays.</returns>
/// <exception cref="OutOfMemoryException">Rethrown after logging payload sizes.</exception>
public static FetchResponse ParseFrom(KafkaBinaryReader reader)
{
    FetchResponse result = null;
    DateTime startUtc = DateTime.UtcNow;
    int size = 0, correlationId = 0, dataCount = 0;
    try
    {
        size = reader.ReadInt32();
        // (DateTime.UtcNow - startUtc) is equivalent to the former
        // TimeSpan.FromTicks(tick subtraction) but simpler to read.
        Logger.Debug("FetchResponse.ParseFrom: read size byte after "
            + (DateTime.UtcNow - startUtc).TotalSeconds
            + " seconds, packet size " + size);

        // Drain the whole payload off the network in one read, then parse from
        // memory so a slow socket cannot stall mid-structure.
        startUtc = DateTime.UtcNow;
        byte[] remainingBytes = reader.ReadBytes(size);
        Logger.Debug("FetchResponse.ParseFrom: read remaining bytes after "
            + (DateTime.UtcNow - startUtc).TotalSeconds + " seconds");

        startUtc = DateTime.UtcNow;
        KafkaBinaryReader dataReader = new KafkaBinaryReader(new MemoryStream(remainingBytes));
        correlationId = dataReader.ReadInt32();
        dataCount = dataReader.ReadInt32();
        var data = new FetchResponseTopicInfo[dataCount];

        // !!! improvement !!!
        // just receive the bytes, and try to parse them later
        // directly parse the record here, or just keep the bytes to speed up the fetch response
        for (int i = 0; i < dataCount; i++)
        {
            var topic = dataReader.ReadShortString();
            var partitionCount = dataReader.ReadInt32();
            startUtc = DateTime.UtcNow;
            var partitions = new FetchResponsePartitionInfo[partitionCount];
            for (int j = 0; j < partitionCount; j++)
            {
                var partition = dataReader.ReadInt32();
                var error = dataReader.ReadInt16();
                var highWatermark = dataReader.ReadInt64();
                var messageSetSize = dataReader.ReadInt32();
                // Message set is kept as raw bytes; decoding is deferred to the caller.
                var messageSetBytes = dataReader.ReadBytes(messageSetSize);
                Logger.Debug("FetchResponse.ParseFrom: topic " + topic + " partition " + partition
                    + " should get records in " + messageSetSize + " bytes, error " + error
                    + " watermark " + highWatermark);
                partitions[j] = new FetchResponsePartitionInfo(partition, error, highWatermark, messageSetBytes);
            }

            // Fixed: the original source split this string literal across a line
            // break, which does not compile; the message now ends in " seconds".
            Logger.Debug("FetchResponse.ParseFrom: read " + partitionCount + " partitions for segment "
                + (i + 1) + " use " + (DateTime.UtcNow - startUtc).TotalSeconds + " seconds");
            data[i] = new FetchResponseTopicInfo(topic, partitions);
        }

        result = new FetchResponse(correlationId, data, size);
        Logger.Debug("FetchResponse.ParseFrom: read bytes into structure complete after "
            + (DateTime.UtcNow - startUtc).TotalSeconds + " seconds");
    }
    catch (OutOfMemoryException mex)
    {
        // Typo fixed in message: "dataCound" -> "dataCount".
        Logger.Error(
            string.Format(
                "OOM Error. Data values were: size: {0}, correlationId: {1}, dataCount: {2}.\r\nFull Stack of exception: {3}",
                size, correlationId, dataCount, mex.StackTrace));
        throw;
    }
    catch (Exception e)
    {
        Logger.Debug("FetchResponse.ParseFrom: parse response failed\r\n" + e);
        throw;
    }

    return result;
}