public static BufferedMessageSet Decompress(Message message)
{
    switch (message.CompressionCodec)
    {
        case CompressionCodecs.DefaultCompressionCodec:
        case CompressionCodecs.GZIPCompressionCodec:
            byte[] inputBytes = message.Payload;
            using (MemoryStream outputStream = new MemoryStream())
            {
                using (MemoryStream inputStream = new MemoryStream(inputBytes))
                {
                    using (GZipStream gzipInputStream = new GZipStream(inputStream, CompressionMode.Decompress))
                    {
                        try
                        {
                            gzipInputStream.CopyTo(outputStream);
                            gzipInputStream.Close();
                        }
                        catch (IOException ex)
                        {
                            Logger.Error("Error while reading from the GZIP input stream", ex);
                            throw;
                        }
                    }
                }

                return BufferedMessageSet.ParseFrom(outputStream.ToArray());
            }

        default:
            throw new UnknownCodecException(string.Format(CultureInfo.CurrentCulture, "Unknown Codec: {0}", message.CompressionCodec));
    }
}
public static BufferedMessageSet Decompress(Message message, int partition)
{
    switch (message.CompressionCodec)
    {
        case CompressionCodecs.DefaultCompressionCodec:
        case CompressionCodecs.GZIPCompressionCodec:
            var inputBytes = message.Payload;
            using (var outputStream = new MemoryStream())
            {
                using (var inputStream = new MemoryStream(inputBytes))
                {
                    using (var gzipInputStream = new GZipStream(inputStream, CompressionMode.Decompress))
                    {
                        try
                        {
                            gzipInputStream.CopyTo(outputStream);
                            gzipInputStream.Close();
                        }
                        catch (IOException ex)
                        {
                            // Log at error level: this is a failure path, not informational.
                            Logger.ErrorFormat("Error while reading from the GZIP input stream: {0}", ex.FormatException());
                            throw;
                        }
                    }
                }

                outputStream.Position = 0;
                using (var reader = new KafkaBinaryReader(outputStream))
                {
                    return BufferedMessageSet.ParseFrom(reader, (int)outputStream.Length, partition);
                }
            }

        case CompressionCodecs.SnappyCompressionCodec:
            try
            {
                using (var stream = new MemoryStream(SnappyHelper.Decompress(message.Payload)))
                {
                    using (var reader = new KafkaBinaryReader(stream))
                    {
                        return BufferedMessageSet.ParseFrom(reader, (int)stream.Length, partition);
                    }
                }
            }
            catch (Exception ex)
            {
                Logger.ErrorFormat("Error while reading from the Snappy input stream: {0}", ex.FormatException());
                throw;
            }

        default:
            throw new UnknownCodecException(string.Format(CultureInfo.CurrentCulture, "Unknown Codec: {0}", message.CompressionCodec));
    }
}
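A minimal usage sketch for the two Decompress overloads above, assuming they live on a CompressionUtils helper class and that a BufferedMessageSet is enumerated via MoveNext/Current as in the tests further down. The fetchedMessage and partitionId inputs are hypothetical, not library members.

// Illustrative caller only; "fetchedMessage" and "partitionId" are assumed inputs.
BufferedMessageSet inner = CompressionUtils.Decompress(fetchedMessage, partitionId);
while (inner.MoveNext())
{
    byte[] payload = inner.Current.Message.Payload; // one uncompressed inner message
}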
public ProducerRequest(string topic, int partition, BufferedMessageSet messages)
{
    Guard.Assert<ArgumentNullException>(() => messages != null);

    int length = GetRequestLength(topic, messages.SetSize);
    this.RequestBuffer = new BoundedBuffer(length);
    this.Topic = topic;
    this.Partition = partition;
    this.MessageSet = messages;
    this.WriteTo(this.RequestBuffer);
}
public ProducerRequest(string topic, int partition, BufferedMessageSet messages)
{
    Guard.NotNull(messages, "messages");

    int length = GetRequestLength(topic, messages.SetSize);
    this.RequestBuffer = new BoundedBuffer(length);
    this.Topic = topic;
    this.Partition = partition;
    this.MessageSet = messages;
    this.WriteTo(this.RequestBuffer);
}
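A construction sketch for the two constructors above; the topic name, partition, and payload bytes are made-up values.

// Illustrative values; any topic/partition/payload would do.
var payload = new Message(new byte[] { 1, 2, 3 });
var set = new BufferedMessageSet(new List<Message> { payload });
var request = new ProducerRequest("test-topic", 0, set);
// The constructor has already sized RequestBuffer via GetRequestLength and
// serialized the request into it via WriteTo, so it is ready to send.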
public void BufferedMessageSetWriteToValidSequence()
{
    byte[] messageBytes = new byte[] { 1, 2, 3, 4, 5 };
    Message msg1 = new Message(messageBytes);
    Message msg2 = new Message(messageBytes);
    MessageSet messageSet = new BufferedMessageSet(new List<Message>() { msg1, msg2 });
    MemoryStream ms = new MemoryStream();
    messageSet.WriteTo(ms);

    ////first message
    byte[] messageLength = new byte[MessageLengthPartLength];
    Array.Copy(ms.ToArray(), MessageLengthPartOffset, messageLength, 0, MessageLengthPartLength);
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(messageLength);
    }

    Assert.AreEqual(
        MagicNumberPartLength + AttributesPartLength + ChecksumPartLength + messageBytes.Length,
        BitConverter.ToInt32(messageLength, 0));
    Assert.AreEqual(1, ms.ToArray()[MagicNumberPartOffset]); // default magic number should be 1

    byte[] checksumPart = new byte[ChecksumPartLength];
    Array.Copy(ms.ToArray(), ChecksumPartOffset, checksumPart, 0, ChecksumPartLength);
    Assert.AreEqual(Crc32Hasher.Compute(messageBytes), checksumPart);

    byte[] dataPart = new byte[messageBytes.Length];
    Array.Copy(ms.ToArray(), DataPartOffset, dataPart, 0, messageBytes.Length);
    Assert.AreEqual(messageBytes, dataPart);

    ////second message
    int secondMessageOffset = MessageLengthPartLength + MagicNumberPartLength + AttributesPartLength +
                              ChecksumPartLength + messageBytes.Length;
    messageLength = new byte[MessageLengthPartLength];
    Array.Copy(ms.ToArray(), secondMessageOffset + MessageLengthPartOffset, messageLength, 0, MessageLengthPartLength);
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(messageLength);
    }

    Assert.AreEqual(
        MagicNumberPartLength + AttributesPartLength + ChecksumPartLength + messageBytes.Length,
        BitConverter.ToInt32(messageLength, 0));
    Assert.AreEqual(1, ms.ToArray()[secondMessageOffset + MagicNumberPartOffset]); // default magic number should be 1

    checksumPart = new byte[ChecksumPartLength];
    Array.Copy(ms.ToArray(), secondMessageOffset + ChecksumPartOffset, checksumPart, 0, ChecksumPartLength);
    Assert.AreEqual(Crc32Hasher.Compute(messageBytes), checksumPart);

    dataPart = new byte[messageBytes.Length];
    Array.Copy(ms.ToArray(), secondMessageOffset + DataPartOffset, dataPart, 0, messageBytes.Length);
    Assert.AreEqual(messageBytes, dataPart);
}
public void ShouldAbleToEnumerateMessages()
{
    var msg1 = new Message(new byte[101]) { Offset = 0 };
    var msg2 = new Message(new byte[102]) { Offset = 1 };
    var set = new BufferedMessageSet(new List<Message>() { msg1, msg2 }, 0);

    set.MoveNext().Should().BeTrue();
    set.Current.Message.Payload.Length.Should().Be(101);
    set.Current.Message.Offset.Should().Be(0);

    set.MoveNext().Should().BeTrue();
    set.Current.Message.Payload.Length.Should().Be(102);
    set.Current.Message.Offset.Should().Be(1);

    set.MoveNext().Should().BeFalse();
}
public void ShouldAbleToWriteMessageSetWithExtraBytes()
{
    var stream = new MemoryStream();
    var writer = new KafkaBinaryWriter(stream);
    var msg1 = new Message(new byte[101]) { Offset = 0 };
    var msg2 = new Message(new byte[102]) { Offset = 1 };
    var set = new BufferedMessageSet(new List<Message>() { msg1, msg2 }, 0);
    set.WriteTo(writer);
    writer.Write(new byte[10]); // trailing bytes: fewer than an offset + size header, so parsing should ignore them

    var size = (int)stream.Position;
    stream.Seek(0, SeekOrigin.Begin);
    var reader = new KafkaBinaryReader(stream);
    var newSet = BufferedMessageSet.ParseFrom(reader, size, 0);
    var messages = newSet.Messages.ToList();

    messages.Count().Should().Be(2);
    messages[0].Payload.Count().Should().Be(101);
    messages[1].Payload.Count().Should().Be(102);
}
public static Message Compress(IEnumerable<Message> messages, CompressionCodecs compressionCodec)
{
    switch (compressionCodec)
    {
        case CompressionCodecs.DefaultCompressionCodec:
        case CompressionCodecs.GZIPCompressionCodec:
            using (MemoryStream outputStream = new MemoryStream())
            {
                using (GZipStream gZipStream = new GZipStream(outputStream, CompressionMode.Compress))
                {
                    if (Logger.IsDebugEnabled)
                    {
                        Logger.DebugFormat(
                            CultureInfo.CurrentCulture,
                            "Allocating BufferedMessageSet of size = {0}",
                            MessageSet.GetMessageSetSize(messages));
                    }

                    var bufferedMessageSet = new BufferedMessageSet(messages);
                    using (MemoryStream inputStream = new MemoryStream(bufferedMessageSet.SetSize))
                    {
                        bufferedMessageSet.WriteTo(inputStream);
                        inputStream.Position = 0;
                        try
                        {
                            byte[] buffer = inputStream.ToArray();
                            gZipStream.Write(buffer, 0, buffer.Length);
                            gZipStream.Close();
                        }
                        catch (IOException ex)
                        {
                            Logger.Error("Error while writing to the GZIP stream", ex);
                            throw;
                        }
                    }

                    Message oneCompressedMessage = new Message(outputStream.ToArray(), compressionCodec);
                    return oneCompressedMessage;
                }
            }

        default:
            throw new UnknownCodecException(string.Format(CultureInfo.CurrentCulture, "Unknown Codec: {0}", compressionCodec));
    }
}
public static Message Compress(IEnumerable<Message> messages, CompressionCodecs compressionCodec, int partition)
{
    switch (compressionCodec)
    {
        case CompressionCodecs.DefaultCompressionCodec:
        case CompressionCodecs.GZIPCompressionCodec:
            using (var outputStream = new MemoryStream())
            {
                using (var gZipStream = new GZipStream(outputStream, CompressionMode.Compress))
                {
                    if (Logger.IsDebugEnabled)
                    {
                        Logger.DebugFormat(
                            "Allocating BufferedMessageSet of size = {0}",
                            MessageSet.GetMessageSetSize(messages));
                    }

                    var bufferedMessageSet = new BufferedMessageSet(messages, partition);
                    using (var inputStream = new MemoryStream(bufferedMessageSet.SetSize))
                    {
                        bufferedMessageSet.WriteTo(inputStream);
                        inputStream.Position = 0;
                        try
                        {
                            byte[] buffer = inputStream.ToArray();
                            gZipStream.Write(buffer, 0, buffer.Length);
                            gZipStream.Close();
                        }
                        catch (IOException ex)
                        {
                            Logger.ErrorFormat("Error while writing to the GZIP stream {0}", ex.FormatException());
                            throw;
                        }
                    }

                    Message oneCompressedMessage = new Message(outputStream.ToArray(), compressionCodec)
                    {
                        PartitionId = partition
                    };
                    return oneCompressedMessage;
                }
            }

        case CompressionCodecs.SnappyCompressionCodec:
            Logger.DebugFormat(
                "Allocating BufferedMessageSet of size = {0}",
                MessageSet.GetMessageSetSize(messages));

            var messageSet = new BufferedMessageSet(messages, partition);
            using (var inputStream = new MemoryStream(messageSet.SetSize))
            {
                messageSet.WriteTo(inputStream);
                inputStream.Position = 0;
                try
                {
                    return new Message(SnappyHelper.Compress(inputStream.GetBuffer()), compressionCodec)
                    {
                        PartitionId = partition
                    };
                }
                catch (Exception ex)
                {
                    Logger.ErrorFormat("Error while writing to the Snappy stream {0}", ex.FormatException());
                    throw;
                }
            }

        default:
            throw new UnknownCodecException(string.Format(CultureInfo.CurrentCulture, "Unknown Codec: {0}", compressionCodec));
    }
}
public static Message Compress(IEnumerable<Message> messages, CompressionCodecs compressionCodec, int partition)
{
    switch (compressionCodec)
    {
        case CompressionCodecs.DefaultCompressionCodec:
        case CompressionCodecs.GZIPCompressionCodec:
            using (var outputStream = new MemoryStream())
            {
                using (var gZipStream = new GZipStream(outputStream, CompressionMode.Compress))
                {
                    //if (Logger.IsDebugEnabled)
                    {
                        Logger.DebugFormat(
                            "Allocating BufferedMessageSet of size = {0}",
                            MessageSet.GetMessageSetSize(messages));
                    }

                    var bufferedMessageSet = new BufferedMessageSet(messages, partition);
                    using (var inputStream = new MemoryStream(bufferedMessageSet.SetSize))
                    {
                        bufferedMessageSet.WriteTo(inputStream);
                        inputStream.Position = 0;
                        try
                        {
                            byte[] buffer = inputStream.ToArray();
                            gZipStream.Write(buffer, 0, buffer.Length);
                            gZipStream.Close();
                        }
                        catch (IOException ex)
                        {
                            Logger.ErrorFormat("Error while writing to the GZIP stream {0}", ex.FormatException());
                            throw;
                        }
                    }

                    var oneCompressedMessage = new Message(outputStream.ToArray(), compressionCodec)
                    {
                        PartitionId = partition
                    };
                    return oneCompressedMessage;
                }
            }

        case CompressionCodecs.SnappyCompressionCodec:
            Logger.DebugFormat(
                "Allocating BufferedMessageSet of size = {0}",
                MessageSet.GetMessageSetSize(messages));

            var messageSet = new BufferedMessageSet(messages, partition);
            using (var inputStream = new MemoryStream(messageSet.SetSize))
            {
                messageSet.WriteTo(inputStream);
                inputStream.Position = 0;
                try
                {
                    return new Message(SnappyHelper.Compress(inputStream.GetBuffer()), compressionCodec)
                    {
                        PartitionId = partition
                    };
                }
                catch (Exception ex)
                {
                    Logger.ErrorFormat("Error while writing to the Snappy stream {0}", ex.FormatException());
                    throw;
                }
            }

        default:
            throw new UnknownCodecException(string.Format(CultureInfo.CurrentCulture, "Unknown Codec: {0}", compressionCodec));
    }
}
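A round-trip sketch tying the Compress overloads above to Decompress, again assuming both live on a CompressionUtils helper: a GZIP-compressed batch should parse back into the same two inner messages.

// Illustrative round trip; partition 0 is arbitrary.
var batch = new List<Message> { new Message(new byte[] { 1 }), new Message(new byte[] { 2 }) };
Message wrapper = CompressionUtils.Compress(batch, CompressionCodecs.GZIPCompressionCodec, 0);
BufferedMessageSet roundTripped = CompressionUtils.Decompress(wrapper, 0);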
/// <summary>
/// Adds a message set to the queue.
/// </summary>
/// <param name="messages">The message set.</param>
/// <returns>The set size.</returns>
public int Add(BufferedMessageSet messages)
{
    int size = messages.SetSize;
    if (size > 0)
    {
        long offset = messages.Messages.Last().Offset;
        Logger.InfoFormat("{2} : Updating fetch offset = {0} with value = {1}", this.fetchedOffset, offset, this.PartitionId);
        this.chunkQueue.Add(new FetchedDataChunk(messages, this, this.fetchedOffset));
        Interlocked.Exchange(ref this.fetchedOffset, offset);
        Logger.Debug("Updated fetch offset of " + this + " to " + offset);
    }

    return size;
}
public void SetSizeValid()
{
    byte[] messageBytes = new byte[] { 1, 2, 3, 4, 5 };
    Message msg1 = new Message(messageBytes);
    Message msg2 = new Message(messageBytes);
    MessageSet messageSet = new BufferedMessageSet(new List<Message>() { msg1, msg2 });

    Assert.AreEqual(
        2 * (MessageLengthPartLength + MagicNumberPartLength + AttributesPartLength + ChecksumPartLength + messageBytes.Length),
        messageSet.SetSize);
}
public int Add(BufferedMessageSet messages, long fetchOffset)
{
    int size = messages.SetSize;
    if (size > 0)
    {
        // Advance the fetched offset by the set's size in bytes; the chunk itself is
        // queued with the offset it was fetched at.
        long newOffset = Interlocked.Add(ref this.fetchedOffset, size);
        Logger.Debug("Updated fetch offset of " + this + " to " + newOffset);
        this.chunkQueue.Add(new FetchedDataChunk(messages, this, fetchOffset));
    }

    return size;
}
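The two Add overloads track the fetch position differently: the first records the logical offset of the last message in the set, while this one advances a byte-based offset by the set's size. A hypothetical fetcher loop; partitionTopicInfo, fetchedSet, and requestedOffset are assumed names, not library members.

// Illustrative only; none of these names come from the snippets above.
int bytesRead = partitionTopicInfo.Add(fetchedSet, requestedOffset);
if (bytesRead == 0)
{
    // Nothing was queued, so no progress was made at requestedOffset; back off before refetching.
}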