/// <summary>
/// Serializes <paramref name="messages"/> into a single message-set buffer, assigning each
/// message a fresh offset from <paramref name="offsetCounter"/>. When a compression codec is
/// specified, all messages are compressed into one wrapper <see cref="Message"/> carrying the
/// offset of the last inner message.
/// </summary>
/// <param name="offsetCounter">Source of monotonically increasing message offsets.</param>
/// <param name="compressionCodec">Codec to apply; <see cref="CompressionCodecs.NoCompressionCodec"/> writes messages uncompressed.</param>
/// <param name="messages">Messages to serialize; null or empty yields the shared empty buffer.</param>
/// <returns>A rewound <see cref="ByteBuffer"/> containing the serialized message set.</returns>
private static ByteBuffer Create(AtomicLong offsetCounter, CompressionCodecs compressionCodec, List<Message> messages)
{
    // Nothing to serialize — hand back the shared empty buffer.
    if (messages == null || !messages.Any())
    {
        return Empty.Buffer;
    }

    if (CompressionCodecs.NoCompressionCodec == compressionCodec)
    {
        // Uncompressed path: lay each message out back-to-back with its assigned offset.
        var messageSetBuffer = ByteBuffer.Allocate(MessageSetSize(messages));
        foreach (var message in messages)
        {
            WriteMessage(messageSetBuffer, message, offsetCounter.GetAndIncrement());
        }

        messageSetBuffer.Rewind();
        return messageSetBuffer;
    }

    // Compressed path: serialize every message through the codec stream, then wrap the
    // resulting bytes in a single envelope message stamped with the last inner offset.
    var compressedStream = new MemoryStream(MessageSetSize(messages));
    var lastOffset = -1L;
    using (var writer = new KafkaBinaryWriter(CompressionFactory.BuildWriter(compressionCodec, compressedStream)))
    {
        foreach (var message in messages)
        {
            lastOffset = offsetCounter.GetAndIncrement();
            writer.Write(lastOffset);
            writer.Write(message.Size);
            writer.Write(message.Buffer.Array, message.Buffer.ArrayOffset(), message.Buffer.Limit());
        }
    }

    var compressedPayload = compressedStream.ToArray();
    var wrapperMessage = new Message(compressedPayload, compressionCodec);
    var wrapperBuffer = ByteBuffer.Allocate(wrapperMessage.Size + LogOverhead);
    WriteMessage(wrapperBuffer, wrapperMessage, lastOffset);
    wrapperBuffer.Rewind();
    return wrapperBuffer;
}
/// <summary>
/// Decompresses the payload of a compressed wrapper <paramref name="message"/> and returns the
/// contained messages as a <see cref="ByteBufferMessageSet"/>.
/// </summary>
/// <param name="message">A message whose payload was written with a compression codec.</param>
/// <returns>A message set backed by a rewound buffer holding the decompressed bytes.</returns>
public static ByteBufferMessageSet Decompress(Message message)
{
    var decompressed = new MemoryStream();
    var payload = message.Payload;
    var chunk = new byte[1024];

    // Pump the codec reader into the in-memory stream one chunk at a time until exhausted.
    using (var reader = CompressionFactory.BuildReader(message.CompressionCodec, payload))
    {
        int bytesRead;
        while ((bytesRead = reader.Read(chunk, 0, chunk.Length)) > 0)
        {
            decompressed.Write(chunk, 0, bytesRead);
        }
    }

    // Copy the decompressed bytes into a fresh buffer, rewound and ready for reading.
    var resultBuffer = ByteBuffer.Allocate((int)decompressed.Length);
    resultBuffer.Put(decompressed.ToArray());
    resultBuffer.Rewind();
    return new ByteBufferMessageSet(resultBuffer);
}