// Compresses a sequence of already-encoded request messages from uncompressedBuffer
// into a newly allocated buffer, which is returned (ownership passes to the caller).
// Messages whose MayBeCompressed flag is false are copied through unchanged.
private IByteBuffer CompressMessages(
    IEnumerable<RequestMessage> messages,
    IByteBuffer uncompressedBuffer,
    MessageEncoderSettings messageEncoderSettings)
{
    var outputBufferChunkSource = new OutputBufferChunkSource(BsonChunkPool.Default);
    var compressedBuffer = new MultiChunkBuffer(outputBufferChunkSource);

    using (var uncompressedStream = new ByteBufferStream(uncompressedBuffer, ownsBuffer: false))
    using (var compressedStream = new ByteBufferStream(compressedBuffer, ownsBuffer: false))
    {
        foreach (var message in messages)
        {
            // Peek this message's total length (the first int32 of the wire header),
            // then rewind so the slice covers the whole message including the header.
            var uncompressedMessageLength = uncompressedStream.ReadInt32();
            uncompressedStream.Position -= 4;

            using (var uncompressedMessageSlice = uncompressedBuffer.GetSlice((int)uncompressedStream.Position, uncompressedMessageLength))
            using (var uncompressedMessageStream = new ByteBufferStream(uncompressedMessageSlice, ownsBuffer: false))
            {
                // FIX: advance past the current message. Without this the read position
                // never moved, so every iteration after the first re-sliced and
                // re-processed the first message's bytes instead of the next message.
                uncompressedStream.Position += uncompressedMessageLength;

                if (message.MayBeCompressed)
                {
                    CompressMessage(message, uncompressedMessageStream, compressedStream, messageEncoderSettings);
                }
                else
                {
                    // not compressible: copy the raw message bytes through verbatim
                    uncompressedMessageStream.EfficientCopyTo(compressedStream);
                }
            }
        }

        compressedBuffer.Length = (int)compressedStream.Length;
    }

    return compressedBuffer;
}
public void WriteSlice_should_have_expected_effect(
    [Values(0, 1, 2, 16)] int length,
    [Values(1, 2, 3)] int numberOfChunks)
{
    // every chunk must hold at least one byte, so cap the chunk count at the length
    numberOfChunks = length == 0 ? 1 : Math.Min(length, numberOfChunks);

    using (var memoryStream = new MemoryStream())
    using (var stream = new BsonStreamAdapter(memoryStream))
    {
        var expectedBytes = Enumerable.Range(0, length).Select(n => (byte)n).ToArray();

        IByteBuffer slice;
        if (numberOfChunks == 1)
        {
            slice = new ByteArrayBuffer(expectedBytes, isReadOnly: true);
        }
        else
        {
            // split the bytes into equal-sized chunks, the last chunk taking any remainder
            var chunkSize = length / numberOfChunks;
            var chunks = Enumerable.Range(0, numberOfChunks)
                .Select(i => expectedBytes
                    .Skip(i * chunkSize)
                    .Take(i < numberOfChunks - 1 ? chunkSize : int.MaxValue)
                    .ToArray())
                .Select(chunkBytes => new ByteArrayChunk(chunkBytes));
            slice = new MultiChunkBuffer(chunks);
        }

        stream.WriteSlice(slice);

        memoryStream.ToArray().Should().Equal(expectedBytes);
    }
}
// Encodes all pending messages into a single read-only buffer and reports which
// messages were actually encoded (those whose ShouldBeSent callback is null or
// returns true). Also records the time spent serializing in _serializationDuration.
// Ownership of the returned buffer passes to the caller.
public IByteBuffer EncodeMessages(CancellationToken cancellationToken, out List<RequestMessage> sentMessages)
{
    sentMessages = new List<RequestMessage>();
    cancellationToken.ThrowIfCancellationRequested();

    var serializationStopwatch = Stopwatch.StartNew();
    var outputBufferChunkSource = new OutputBufferChunkSource(BsonChunkPool.Default);
    var buffer = new MultiChunkBuffer(outputBufferChunkSource);
    // ownsBuffer: false — the buffer must outlive the stream because it is returned below
    using (var stream = new ByteBufferStream(buffer, ownsBuffer: false))
    {
        var encoderFactory = new BinaryMessageEncoderFactory(stream, _messageEncoderSettings, compressorSource: null);

        foreach (var message in _messages)
        {
            if (message.ShouldBeSent == null || message.ShouldBeSent())
            {
                var encoder = message.GetEncoder(encoderFactory);
                encoder.WriteMessage(message);
                message.WasSent = true;
                sentMessages.Add(message);
            }

            // Encoding messages includes serializing the
            // documents, so encoding message could be expensive
            // and worthy of us honoring cancellation here.
            cancellationToken.ThrowIfCancellationRequested();
        }

        buffer.Length = (int)stream.Length;
        buffer.MakeReadOnly();
    }
    serializationStopwatch.Stop();
    _serializationDuration = serializationStopwatch.Elapsed;

    return (buffer);
}
// Encodes the given messages into a pooled buffer (skipping any whose ShouldBeSent
// callback returns false) and enqueues that buffer on the outbound queue, completing
// when the queue entry's task completes. Raises the before/after/error sending
// events on _listener when one is attached.
public async Task SendMessagesAsync(IEnumerable<RequestMessage> messages, MessageEncoderSettings messageEncoderSettings, CancellationToken cancellationToken)
{
    Ensure.IsNotNull(messages, "messages");
    ThrowIfDisposedOrNotOpen();
    var messagesToSend = messages.ToList();
    try
    {
        if (_listener != null)
        {
            _listener.ConnectionBeforeSendingMessages(new ConnectionBeforeSendingMessagesEvent(_connectionId, messagesToSend));
        }
        cancellationToken.ThrowIfCancellationRequested();
        using (var buffer = new MultiChunkBuffer(BsonChunkPool.Default))
        {
            // ownsByteBuffer: false — the stream must not dispose the buffer, because
            // the buffer is still used below after the stream's using block closes
            using (var stream = new ByteBufferStream(buffer, ownsByteBuffer: false))
            {
                var encoderFactory = new BinaryMessageEncoderFactory(stream, messageEncoderSettings);
                foreach (var message in messagesToSend)
                {
                    if (message.ShouldBeSent == null || message.ShouldBeSent())
                    {
                        var encoder = message.GetEncoder(encoderFactory);
                        encoder.WriteMessage(message);
                        message.WasSent = true;
                    }
                    // Encoding messages includes serializing the
                    // documents, so encoding message could be expensive
                    // and worthy of us honoring cancellation here.
                    cancellationToken.ThrowIfCancellationRequested();
                }
                buffer.Length = (int)stream.Length;
            }
            // hand the encoded buffer to the writer loop and wait for it to be sent
            var stopwatch = Stopwatch.StartNew();
            var entry = new OutboundQueueEntry(buffer, cancellationToken);
            _outboundQueue.Enqueue(entry);
            await entry.Task.ConfigureAwait(false);
            stopwatch.Stop();
            if (_listener != null)
            {
                _listener.ConnectionAfterSendingMessages(new ConnectionAfterSendingMessagesEvent(_connectionId, messagesToSend, buffer.Length, stopwatch.Elapsed));
            }
        }
    }
    catch (Exception ex)
    {
        if (_listener != null)
        {
            _listener.ConnectionErrorSendingMessages(new ConnectionErrorSendingMessagesEvent(_connectionId, messagesToSend, ex));
        }
        throw;
    }
}
public void constructor_with_chunks_should_default_isReadOnly_to_false()
{
    var emptyChunks = Enumerable.Empty<IBsonChunk>();

    var buffer = new MultiChunkBuffer(emptyChunks);

    buffer.IsReadOnly.Should().BeFalse();
}
public void constructor_with_chunks_should_compute_capacity(int numberOfChunks, int expectedCapacity)
{
    // chunk sizes are 1, 2, ..., numberOfChunks
    var chunks = CreateChunks(Enumerable.Range(1, numberOfChunks));

    var buffer = new MultiChunkBuffer(chunks);

    buffer.Capacity.Should().Be(expectedCapacity);
}
public void constructor_with_chunks_should_default_length_to_capacity(int numberOfChunks)
{
    // chunk sizes are 1, 2, ..., numberOfChunks
    var chunks = CreateChunks(Enumerable.Range(1, numberOfChunks));

    var buffer = new MultiChunkBuffer(chunks);

    buffer.Length.Should().Be(buffer.Capacity);
}
// Encodes the given messages into a pooled buffer (skipping any whose ShouldBeSent
// callback returns false) and enqueues that buffer on the outbound queue, completing
// when the queue entry's task completes.
// NOTE(review): timeout is validated but otherwise unused in this body — confirm
// whether enforcement is expected to happen elsewhere (e.g. in the outbound queue).
public async Task SendMessagesAsync(IEnumerable<RequestMessage> messages, MessageEncoderSettings messageEncoderSettings, TimeSpan timeout, CancellationToken cancellationToken)
{
    Ensure.IsNotNull(messages, "messages");
    Ensure.IsInfiniteOrGreaterThanOrEqualToZero(timeout, "timeout");
    ThrowIfDisposedOrNotOpen();
    var messagesToSend = messages.ToList();
    try
    {
        if (_listener != null)
        {
            _listener.ConnectionBeforeSendingMessages(_connectionId, messagesToSend);
        }
        using (var buffer = new MultiChunkBuffer(BsonChunkPool.Default))
        {
            // ownsByteBuffer: false — the stream must not dispose the buffer, because
            // the buffer is still used below after the stream's using block closes
            using (var stream = new ByteBufferStream(buffer, ownsByteBuffer: false))
            {
                var encoderFactory = new BinaryMessageEncoderFactory(stream, messageEncoderSettings);
                foreach (var message in messagesToSend)
                {
                    if (message.ShouldBeSent == null || message.ShouldBeSent())
                    {
                        var encoder = message.GetEncoder(encoderFactory);
                        encoder.WriteMessage(message);
                        message.WasSent = true;
                    }
                }
                buffer.Length = (int)stream.Length;
            }
            // hand the encoded buffer to the writer loop and wait for it to be sent
            var stopwatch = Stopwatch.StartNew();
            var entry = new OutboundQueueEntry(buffer, cancellationToken);
            _outboundQueue.Enqueue(entry);
            await entry.Task.ConfigureAwait(false);
            stopwatch.Stop();
            if (_listener != null)
            {
                _listener.ConnectionAfterSendingMessages(_connectionId, messagesToSend, buffer.Length, stopwatch.Elapsed);
            }
        }
    }
    catch (Exception ex)
    {
        if (_listener != null)
        {
            _listener.ConnectionErrorSendingMessages(_connectionId, messagesToSend, ex);
        }
        throw;
    }
}
public void ExpandCapacity_should_throw_when_expanded_capacity_exceeds_2GB()
{
    using (var buffer = new MultiChunkBuffer(BsonChunkPool.Default))
    {
        // grow to just under int.MaxValue so the next expansion must overflow 2GB
        buffer.EnsureCapacity(int.MaxValue - 128 * 1024 * 1024);

        Action action = () => buffer.EnsureCapacity(int.MaxValue); // indirectly calls private ExpandCapacity method

        action.ShouldThrow<InvalidOperationException>();
    }
}
public void AccessBackingBytes_should_return_expected_result_when_there_are_zero_chunks()
{
    var chunkSource = new Mock<IBsonChunkSource>();
    var buffer = new MultiChunkBuffer(chunkSource.Object);

    var segment = buffer.AccessBackingBytes(0);

    // with no chunks the buffer exposes an empty segment
    segment.Array.Should().HaveCount(0);
    segment.Offset.Should().Be(0);
    segment.Count.Should().Be(0);
}
public void constructor_with_chunks_should_compute_positions(int numberOfChunks, int[] expectedPositions)
{
    // chunk sizes are 1, 2, ..., numberOfChunks
    var chunks = CreateChunks(Enumerable.Range(1, numberOfChunks));

    var buffer = new MultiChunkBuffer(chunks);

    var reflector = new Reflector(buffer);
    reflector._positions.Should().Equal(expectedPositions);
}
public void AccessBackingBytes_should_adjust_count_when_multiple_chunks_are_present()
{
    var firstArray = new byte[] { 1, 2 };
    var secondArray = new byte[] { 3, 4 };
    var chunks = new[] { firstArray, secondArray }.Select(array => new ByteArrayChunk(array));
    var buffer = new MultiChunkBuffer(chunks, isReadOnly: true);
    var subject = new ByteBufferSlice(buffer, 1, 2);

    var segment = subject.AccessBackingBytes(0);

    segment.Array.Should().BeSameAs(firstArray);
    segment.Offset.Should().Be(1);
    segment.Count.Should().Be(1); // not 2 or 3
}
public void Dispose_should_dispose_chunks(
    [Values(0, 1, 2, 3)] int numberOfChunks)
{
    var chunks = Enumerable.Range(1, numberOfChunks)
        .Select(_ => Substitute.For<IBsonChunk>())
        .ToList();
    var buffer = new MultiChunkBuffer(chunks);

    buffer.Dispose();

    // disposing the buffer must dispose each chunk exactly once
    chunks.ForEach(chunk => chunk.Received(1).Dispose());
}
public void EnsureCapacity_should_have_expected_effect(int minimumCapacity, int[] expectedChunkSizes)
{
    var chunkSource = Substitute.For<IBsonChunkSource>();
    var buffer = new MultiChunkBuffer(chunkSource);
    // the source hands out chunks of increasing size: 1, 2, 3, ...
    var nextChunkSize = 1;
    chunkSource.GetChunk(Arg.Any<int>()).Returns(x => new ByteArrayChunk(nextChunkSize++));

    buffer.EnsureCapacity(minimumCapacity);

    buffer.Capacity.Should().BeGreaterOrEqualTo(minimumCapacity);
    var reflector = new Reflector(buffer);
    reflector._chunks.Select(chunk => chunk.Bytes.Count).Should().Equal(expectedChunkSizes);
}
public void EnsureCapacity_should_have_expected_effect(int minimumCapacity, int[] expectedChunkSizes)
{
    var mockChunkSource = new Mock<IBsonChunkSource>();
    var buffer = new MultiChunkBuffer(mockChunkSource.Object);
    // the source hands out chunks of increasing size: 1, 2, 3, ...
    var nextChunkSize = 1;
    mockChunkSource.Setup(s => s.GetChunk(It.IsAny<int>())).Returns(() => new ByteArrayChunk(nextChunkSize++));

    buffer.EnsureCapacity(minimumCapacity);

    buffer.Capacity.Should().BeGreaterOrEqualTo(minimumCapacity);
    var reflector = new Reflector(buffer);
    reflector._chunks.Select(chunk => chunk.Bytes.Count).Should().Equal(expectedChunkSizes);
}
public void Dispose_should_dispose_chunks(
    [Values(0, 1, 2, 3)] int numberOfChunks)
{
    var chunks = Enumerable.Range(1, numberOfChunks)
        .Select(_ => new Mock<IBsonChunk>().Object)
        .ToList();
    var buffer = new MultiChunkBuffer(chunks);

    buffer.Dispose();

    // disposing the buffer must dispose each chunk exactly once
    foreach (var chunk in chunks)
    {
        Mock.Get(chunk).Verify(c => c.Dispose(), Times.Once);
    }
}
public void constructor_with_chunkSource_should_initialize_subject()
{
    var mockChunkSource = new Mock<IBsonChunkSource>();

    var buffer = new MultiChunkBuffer(mockChunkSource.Object);

    // a freshly constructed buffer is empty, writable, and holds the chunk source
    buffer.Capacity.Should().Be(0);
    buffer.ChunkSource.Should().BeSameAs(mockChunkSource.Object);
    buffer.IsReadOnly.Should().BeFalse();
    buffer.Length.Should().Be(0);
    var reflector = new Reflector(buffer);
    reflector._chunks.Should().HaveCount(0);
    reflector._disposed.Should().BeFalse();
    reflector._positions.Should().Equal(new[] { 0 });
}
public void constructor_with_chunks_should_initialize_subject()
{
    var emptyChunks = Enumerable.Empty<IBsonChunk>();

    var buffer = new MultiChunkBuffer(emptyChunks, 0, false);

    // a buffer built from zero chunks is empty, writable, and has no chunk source
    buffer.Capacity.Should().Be(0);
    buffer.ChunkSource.Should().BeNull();
    buffer.IsReadOnly.Should().BeFalse();
    buffer.Length.Should().Be(0);
    var reflector = new Reflector(buffer);
    reflector._chunks.Should().HaveCount(0);
    reflector._disposed.Should().BeFalse();
    reflector._positions.Should().Equal(new[] { 0 });
}
public void ChunkSource_get_should_return_expected_result(
    [Values(false, true)] bool disposed)
{
    var chunkSource = Substitute.For<IBsonChunkSource>();
    var buffer = new MultiChunkBuffer(chunkSource);
    if (disposed)
    {
        buffer.Dispose();
    }

    var result = buffer.ChunkSource;

    // the getter returns the source whether or not the buffer has been disposed
    result.Should().BeSameAs(chunkSource);
}
public void ChunkSource_get_should_return_expected_result(
    [Values(false, true)] bool disposed)
{
    var mockChunkSource = new Mock<IBsonChunkSource>();
    var buffer = new MultiChunkBuffer(mockChunkSource.Object);
    if (disposed)
    {
        buffer.Dispose();
    }

    var result = buffer.ChunkSource;

    // the getter returns the source whether or not the buffer has been disposed
    result.Should().BeSameAs(mockChunkSource.Object);
}
/// <summary>
/// Reads the message.
/// </summary>
/// <returns>A message.</returns>
public CompressedMessage ReadMessage()
{
    var reader = CreateBinaryReader();
    var stream = reader.BsonStream;
    // NOTE(review): messageStartPosition is captured but not used below — confirm intentional
    var messageStartPosition = stream.Position;

    // standard wire message header: total length, requestId, responseTo, opcode
    var messageLength = stream.ReadInt32();
    EnsureMessageLengthIsValid(messageLength);
    var requestId = stream.ReadInt32();
    var responseTo = stream.ReadInt32();
    var opcode = (Opcode)stream.ReadInt32();
    EnsureOpcodeIsValid(opcode);

    // compressed-message payload header: original opcode, uncompressed body size, compressor id
    var originalOpcode = (Opcode)stream.ReadInt32();
    var uncompressedSize = stream.ReadInt32();
    var compressorType = (CompressorType)stream.ReadByte();
    var compressor = _compressorSource.Get(compressorType);

    using (var uncompressedBuffer = new MultiChunkBuffer(new OutputBufferChunkSource(BsonChunkPool.Default)))
    using (var uncompressedStream = new ByteBufferStream(uncompressedBuffer, ownsBuffer: false))
    {
        // Reconstruct a standard message header in front of the decompressed body so
        // the original message's encoder can read the stream as a normal wire message.
        uncompressedStream.WriteInt32(uncompressedSize + MessageHeaderLength);
        uncompressedStream.WriteInt32(requestId);
        uncompressedStream.WriteInt32(responseTo);
        uncompressedStream.WriteInt32((int)originalOpcode);
        compressor.Decompress(stream, uncompressedStream);

        // rewind and freeze the buffer before handing it to the inner decoder
        uncompressedStream.Position = 0;
        uncompressedBuffer.MakeReadOnly();

        var originalMessageEncoderFactory = new BinaryMessageEncoderFactory(uncompressedStream, _encoderSettings, _compressorSource);
        var originalMessageEncoder = _originalEncoderSelector.GetEncoder(originalMessageEncoderFactory);
        var originalMessage = originalMessageEncoder.ReadMessage();

        return (new CompressedMessage(originalMessage, null, compressorType));
    }
}
public void Test20KDocument()
{
    // manufacture an approximately 20K document using 200 strings each 100 characters long
    // it's enough to cause the document to straddle a chunk boundary
    var document = new BsonDocument();
    var value = new string('x', 100);
    for (var i = 0; i < 200; i++)
    {
        document.Add(i.ToString(), value);
    }

    // round trip tests
    var bson = document.ToBson();
    var rehydrated = BsonSerializer.Deserialize<BsonDocument>(bson);
    Assert.IsTrue(bson.SequenceEqual(rehydrated.ToBson()));

    // test failure mode when 20 bytes are truncated from the buffer
    using (var byteBuffer = new MultiChunkBuffer(BsonChunkPool.Default))
    using (var byteBufferStream = new ByteBufferStream(byteBuffer, ownsBuffer: true))
    {
        using (var memoryStream = new MemoryStream(bson))
        {
            memoryStream.CopyTo(byteBufferStream);
        }
        byteBufferStream.SetLength(byteBufferStream.Length - 20);
        byteBufferStream.Position = 0;

        using (var bsonReader = new BsonBinaryReader(byteBufferStream))
        {
            Assert.Throws<EndOfStreamException>(
                () => BsonSerializer.Deserialize<BsonDocument>(bsonReader));
        }
    }
}
// Encodes the given messages into a pooled buffer (skipping any whose ShouldBeSent
// callback returns false), sends the buffer via SendBufferAsync, and raises the
// sending/sent/failed event handlers when attached. Reports both the serialization
// duration and the network duration in the sent event.
public async Task SendMessagesAsync(IEnumerable<RequestMessage> messages, MessageEncoderSettings messageEncoderSettings, CancellationToken cancellationToken)
{
    Ensure.IsNotNull(messages, nameof(messages));
    ThrowIfDisposedOrNotOpen();
    var messagesToSend = messages.ToList();
    var requestIds = messagesToSend.Select(x => x.RequestId).ToList();
    try
    {
        if (_sendingMessagesEventHandler != null)
        {
            _sendingMessagesEventHandler(new ConnectionSendingMessagesEvent(_connectionId, requestIds));
        }
        cancellationToken.ThrowIfCancellationRequested();
        // stopwatch first times serialization, then is restarted to time the network send
        var stopwatch = Stopwatch.StartNew();
        var outputBufferChunkSource = new OutputBufferChunkSource(BsonChunkPool.Default);
        using (var buffer = new MultiChunkBuffer(outputBufferChunkSource))
        {
            // ownsBuffer: false — the stream must not dispose the buffer, because the
            // buffer is still used below after the stream's using block closes
            using (var stream = new ByteBufferStream(buffer, ownsBuffer: false))
            {
                var encoderFactory = new BinaryMessageEncoderFactory(stream, messageEncoderSettings);
                foreach (var message in messagesToSend)
                {
                    if (message.ShouldBeSent == null || message.ShouldBeSent())
                    {
                        var encoder = message.GetEncoder(encoderFactory);
                        encoder.WriteMessage(message);
                        message.WasSent = true;
                    }
                    // Encoding messages includes serializing the
                    // documents, so encoding message could be expensive
                    // and worthy of us honoring cancellation here.
                    cancellationToken.ThrowIfCancellationRequested();
                }
                buffer.Length = (int)stream.Length;
            }
            stopwatch.Stop();
            var serializationDuration = stopwatch.Elapsed;
            stopwatch.Restart();
            await SendBufferAsync(buffer, cancellationToken).ConfigureAwait(false);
            stopwatch.Stop();
            if (_sentMessagesEventHandler != null)
            {
                _sentMessagesEventHandler(new ConnectionSentMessagesEvent(_connectionId, requestIds, buffer.Length, stopwatch.Elapsed, serializationDuration));
            }
        }
    }
    catch (Exception ex)
    {
        if (_failedSendingMessagesEvent != null)
        {
            _failedSendingMessagesEvent(new ConnectionSendingMessagesFailedEvent(_connectionId, requestIds, ex));
        }
        throw;
    }
}
// Wraps a MultiChunkBuffer so tests can read its private fields (_chunks,
// _disposed, _positions) through this reflector.
public Reflector(MultiChunkBuffer instance)
{
    _instance = instance;
}