/// <summary>
/// Parse the response bytes.
/// </summary>
/// <param name="response">The bytes of the response.</param>
/// <exception cref="EndOfStreamException">
/// Thrown when the response ends before all expected fields could be read.
/// </exception>
public override void Parse(byte[] response)
{
    //---------------------------------------------------\\
    // Response => ErrorCode CoordinatorId CoordinatorHost CoordinatorPort
    // ErrorCode       => int16
    // CoordinatorId   => int32
    // CoordinatorHost => string (int16 length prefix, UTF-8)
    // CoordinatorPort => int32
    //---------------------------------------------------\\
    using (var stream = new MemoryStream(response))
    {
        var reader = new BinaryReader(stream);

        // Read exactly 'count' bytes or fail loudly. The previous pattern of
        // ignoring the return value of Read would silently hand zero-filled
        // buffers to the converters when the response was truncated.
        Func<int, byte[]> readExact = count =>
        {
            var buffer = reader.ReadBytes(count);
            if (buffer.Length != count)
            {
                throw new EndOfStreamException(
                    "Response truncated: expected " + count + " more byte(s).");
            }

            return buffer;
        };

        this.ErrorCode = KafkaProtocolPrimitiveType.GetInt16(readExact(2));
        this.CoordinatorId = KafkaProtocolPrimitiveType.GetInt32(readExact(4));

        // Host is a length-prefixed UTF-8 string.
        var coordinatorHostSize = KafkaProtocolPrimitiveType.GetInt16(readExact(2));
        this.CoordinatorHost = Encoding.UTF8.GetString(readExact(coordinatorHostSize));

        this.CoordinatorPort = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
    }
}
/// <summary>
/// Parse the response bytes.
/// </summary>
/// <param name="response">The bytes of the response.</param>
/// <exception cref="EndOfStreamException">
/// Thrown when the response ends before all expected fields could be read.
/// </exception>
public override void Parse(byte[] response)
{
    //---------------------------------------------------\\
    // OffsetResponse => [TopicName [PartitionOffsets]]
    // PartitionOffsets => Partition ErrorCode [Offset]
    // Partition => int32
    // ErrorCode => int16
    // Offset => int64
    //---------------------------------------------------\\
    using (var stream = new MemoryStream(response))
    {
        var reader = new BinaryReader(stream);

        // Read exactly 'count' bytes or fail loudly. The previous pattern of
        // ignoring the return value of Read would silently hand zero-filled
        // buffers to the converters when the response was truncated.
        Func<int, byte[]> readExact = count =>
        {
            var buffer = reader.ReadBytes(count);
            if (buffer.Length != count)
            {
                throw new EndOfStreamException(
                    "Response truncated: expected " + count + " more byte(s).");
            }

            return buffer;
        };

        // Size of the topic array.
        var numberOfTopicInfos = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
        this.TopicInfos = new KafkaProtocolOffsetListResponseTopicInfo[numberOfTopicInfos];

        for (int i = 0; i < numberOfTopicInfos; i++)
        {
            var topicInfo = new KafkaProtocolOffsetListResponseTopicInfo();
            this.TopicInfos[i] = topicInfo;

            // Topic name: int16 length prefix followed by UTF-8 bytes.
            var topicNameSize = KafkaProtocolPrimitiveType.GetInt16(readExact(2));
            topicInfo.TopicName = Encoding.UTF8.GetString(readExact(topicNameSize));

            // Size of the partition-offsets array.
            var partitionInfoSize = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
            topicInfo.PartitionInfos =
                new KafkaProtocolOffsetListResponsePartitionInfo[partitionInfoSize];

            for (int j = 0; j < partitionInfoSize; j++)
            {
                var partitionInfo = new KafkaProtocolOffsetListResponsePartitionInfo();
                topicInfo.PartitionInfos[j] = partitionInfo;

                partitionInfo.Partition = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
                partitionInfo.ErrorCode = KafkaProtocolPrimitiveType.GetInt16(readExact(2));

                // Size of the offset array, then the offsets themselves.
                var offsetSize = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
                partitionInfo.Offset = new long[offsetSize];
                for (int k = 0; k < offsetSize; k++)
                {
                    partitionInfo.Offset[k] = KafkaProtocolPrimitiveType.GetInt64(readExact(8));
                }
            }
        }
    }
}
/// <summary>
/// Parse the response bytes.
/// </summary>
/// <param name="response">The bytes of the response.</param>
/// <exception cref="EndOfStreamException">
/// Thrown when the response ends before all expected fields could be read.
/// </exception>
public override void Parse(byte[] response)
{
    //---------------------------------------------------\\
    // OffsetFetchResponse => [TopicName [PartitionInfo]]
    // PartitionInfo => Partition Offset Metadata ErrorCode
    // Partition => int32
    // Offset    => int64
    // Metadata  => string (int16 length prefix, UTF-8)
    // ErrorCode => int16
    //---------------------------------------------------\\
    using (var stream = new MemoryStream(response))
    {
        var reader = new BinaryReader(stream);

        // Read exactly 'count' bytes or fail loudly. The previous pattern of
        // ignoring the return value of Read would silently hand zero-filled
        // buffers to the converters when the response was truncated.
        Func<int, byte[]> readExact = count =>
        {
            var buffer = reader.ReadBytes(count);
            if (buffer.Length != count)
            {
                throw new EndOfStreamException(
                    "Response truncated: expected " + count + " more byte(s).");
            }

            return buffer;
        };

        // Size of the topic array.
        var numOfTopicInfo = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
        this.TopicInfos = new KafkaProtocolOffsetFetchResponseTopicInfo[numOfTopicInfo];

        for (int i = 0; i < numOfTopicInfo; i++)
        {
            var topicInfo = new KafkaProtocolOffsetFetchResponseTopicInfo();
            this.TopicInfos[i] = topicInfo;

            // Topic name: int16 length prefix followed by UTF-8 bytes.
            var topicNameSize = KafkaProtocolPrimitiveType.GetInt16(readExact(2));
            topicInfo.TopicName = Encoding.UTF8.GetString(readExact(topicNameSize));

            // Size of the partition-info array.
            var numOfPartitionInfos = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
            topicInfo.PartitionInfos =
                new KafkaProtocolOffsetFetchResponsePartitionInfo[numOfPartitionInfos];

            for (int j = 0; j < numOfPartitionInfos; j++)
            {
                var partitionInfo = new KafkaProtocolOffsetFetchResponsePartitionInfo();
                topicInfo.PartitionInfos[j] = partitionInfo;

                partitionInfo.Partition = KafkaProtocolPrimitiveType.GetInt32(readExact(4));
                partitionInfo.Offset = KafkaProtocolPrimitiveType.GetInt64(readExact(8));

                // Metadata: int16 length prefix followed by UTF-8 bytes.
                var metadataSize = KafkaProtocolPrimitiveType.GetInt16(readExact(2));
                partitionInfo.Metadata = Encoding.UTF8.GetString(readExact(metadataSize));

                partitionInfo.ErrorCode = KafkaProtocolPrimitiveType.GetInt16(readExact(2));
            }
        }
    }
}