+ 4; // batchLength

/// <summary>
/// Deserializes a single RecordBatch (message format v2, magic byte = 2) from the stream.
/// </summary>
/// <param name="input">Stream positioned at the first byte of the batch (the BaseOffset field).</param>
/// <param name="deserializers">Key/value deserializers applied to the individual records.</param>
/// <param name="endOfAllBatches">Absolute stream position of the end of the whole fetched record set.</param>
/// <returns>
/// The deserialized batch, or null when the batch is truncated (partial message at the end of the
/// response) — callers must handle the null case.
/// </returns>
/// <exception cref="ProtocolException">
/// Thrown when fewer than <c>BytesNecessaryToGetLength</c> bytes remain before
/// <paramref name="endOfAllBatches"/>, i.e. not even the BaseOffset + batchLength header fits.
/// </exception>
/// <exception cref="UnsupportedMagicByteVersion">Thrown when the magic byte is not 2.</exception>
/// <remarks>
/// From the official protocol documentation available at
/// https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-FetchAPI
/// "As an optimization the server is allowed to return a partial message at the end of the message set. Clients should handle this case."
/// If the end of the RecordBatch exceeds the length of the whole response (= endOfAllBatches), we should discard this RecordBatch.
/// </remarks>
public static RecordBatch Deserialize(ReusableMemoryStream input, Deserializers deserializers, long endOfAllBatches)
{
    var recordBatch = new RecordBatch();

    if (input.Position + BytesNecessaryToGetLength > endOfAllBatches)
    {
        throw new ProtocolException(
            $"Trying to read a batch record at {input.Position} and the end of all batches is {endOfAllBatches}." +
            $" There is not enough bytes remaining to even read the first fields...");
    }

    recordBatch.BaseOffset = BigEndianConverter.ReadInt64(input);
    var batchLength = BigEndianConverter.ReadInt32(input);
    var endOfBatch = input.Position + batchLength;
    if (endOfAllBatches < endOfBatch)
    {
        // Partial message, CRCs won't match, return here so the CRC check doesn't throw
        return null;
    }

    recordBatch.PartitionLeaderEpoch = BigEndianConverter.ReadInt32(input);

    // Current magic value is 2. ReadByte() returns -1 at end of stream, which also
    // (correctly) fails this check and is reported as magic byte 0xFF.
    var magic = input.ReadByte();
    if (magic != 2)
    {
        throw new UnsupportedMagicByteVersion((byte)magic, "2");
    }

    // The CRC (Castagnoli polynomial) covers everything from just after the CRC field
    // to the end of the batch; CheckCrcCastagnoli throws on mismatch.
    var crc = (uint)BigEndianConverter.ReadInt32(input);
    var afterCrcPosition = input.Position;
    Crc32.CheckCrcCastagnoli((int)crc, input, afterCrcPosition, endOfBatch - afterCrcPosition);

    // The attributes bitfield packs compression codec, transactional/control flags
    // and the timestamp type.
    var attributes = BigEndianConverter.ReadInt16(input);
    recordBatch.CompressionCodec = (CompressionCodec)(attributes & CompressionCodecMask);
    recordBatch.IsTransactional = (attributes & TransactionalFlagMask) != 0;
    recordBatch.IsControl = (attributes & ControlFlagMask) != 0;
    recordBatch.TimestampType = (attributes & TimestampTypeMask) > 0
        ? TimestampType.LogAppendTime
        : TimestampType.CreateTime;

    // lastOffsetDelta: not needed here, read only to advance the stream.
    BigEndianConverter.ReadInt32(input);

    // firstTimestamp is the base the per-record timestamp deltas are added to.
    var firstTimestamp = BigEndianConverter.ReadInt64(input);

    // maxTimestamp: not needed here, read only to advance the stream.
    BigEndianConverter.ReadInt64(input);

    recordBatch.ProducerId = BigEndianConverter.ReadInt64(input);
    recordBatch.ProducerEpoch = BigEndianConverter.ReadInt16(input);
    recordBatch.BaseSequence = BigEndianConverter.ReadInt32(input);

    var numberOfRecords = BigEndianConverter.ReadInt32(input);
    recordBatch.Records = DeserializeRecords(recordBatch, input, numberOfRecords, endOfBatch, firstTimestamp, deserializers);

    return recordBatch;
}