public void WriteMessage_should_split_batches_when_maxBatchCount_is_reached(int numberOfDocuments, int maxBatchCount, int expectedNumberOfBatches)
{
    // Arrange: a source of sequentially-keyed documents to be batched.
    var allDocuments = new List<BsonDocument>(numberOfDocuments);
    for (var index = 0; index < numberOfDocuments; index++)
    {
        allDocuments.Add(new BsonDocument("_id", index));
    }

    using (var enumerator = allDocuments.GetEnumerator())
    {
        var source = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, source, maxBatchCount, __maxMessageSize, __continueOnError);

        // Act: encode repeatedly until the source is exhausted, counting batches.
        var batchCount = 0;
        var encodedDocuments = new List<BsonDocument>();
        while (source.HasMore)
        {
            using (var stream = new MemoryStream())
            {
                var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
                encoder.WriteMessage(message);
            }
            batchCount++;
            encodedDocuments.AddRange(source.Batch);
            source.ClearBatch();
        }

        // Assert: the expected batch count, and every document encoded exactly once, in order.
        batchCount.Should().Be(expectedNumberOfBatches);
        encodedDocuments.Should().Equal(allDocuments);
    }
}
public void WriteMessage_should_throw_if_message_is_null()
{
    using (var stream = new MemoryStream())
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        // Writing a null message must be rejected up front.
        Action act = () => encoder.WriteMessage(null);

        act.ShouldThrow<ArgumentNullException>();
    }
}
public void WriteMessage_should_write_a_message()
{
    using (var stream = new MemoryStream())
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        encoder.WriteMessage(__testMessage);

        // The encoded stream must match the canonical byte fixture exactly.
        var written = stream.ToArray();
        written.Should().Equal(__testMessageBytes);
    }
}
public void WriteMessage_should_throw_if_binaryWriter_was_not_provided()
{
    // NOTE(review): the encoder is constructed with a reader and a null writer,
    // so any write attempt is expected to fail — presumably the overload taken
    // here is (reader, writer, serializer); confirm against the encoder's ctors.
    using (var stream = new MemoryStream())
    using (var reader = new BsonBinaryReader(stream))
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(reader, null, __serializer);

        Action act = () => encoder.WriteMessage(__testMessage);

        act.ShouldThrow<InvalidOperationException>();
    }
}
public void WriteMessage_should_encode_flags_correctly(int flags, bool continueOnError)
{
    // Arrange: a message whose only varying input is the continueOnError flag.
    var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, __documentSource, __maxBatchCount, __maxMessageSize, continueOnError);

    using (var stream = new MemoryStream())
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        encoder.WriteMessage(message);

        // The flag byte at the known offset must carry the expected value.
        var encoded = stream.ToArray();
        encoded[__flagsOffset].Should().Be((byte)flags);
    }
}
public void ReadMessage_should_decode_flags_correctly(int flags, bool continueOnError)
{
    // Arrange: patch the flags byte in a private copy of the canonical message bytes.
    var messageBytes = (byte[])__testMessageBytes.Clone();
    messageBytes[__flagsOffset] = (byte)flags;

    using (var stream = new MemoryStream(messageBytes))
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        var decoded = encoder.ReadMessage();

        decoded.ContinueOnError.Should().Be(continueOnError);
    }
}
public void ReadMessage_should_throw_when_opcode_is_invalid()
{
    // Corrupt the byte at offset 12 (the opcode field per this fixture) in a copy.
    var messageBytes = (byte[])__testMessageBytes.Clone();
    messageBytes[12]++;

    using (var stream = new MemoryStream(messageBytes))
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        var exception = Record.Exception(() => encoder.ReadMessage());

        exception.Should().BeOfType<FormatException>();
        exception.Message.Should().Be("Insert message opcode is not OP_INSERT.");
    }
}
public void ReadMessage_should_read_a_message()
{
    using (var stream = new MemoryStream(__testMessageBytes))
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        var decoded = encoder.ReadMessage();

        // Round-trip fields must match the fixture; batch limits come back as 0
        // because they are not part of the wire format.
        decoded.CollectionNamespace.Should().Be(__collectionNamespace);
        decoded.ContinueOnError.Should().Be(__continueOnError);
        decoded.DocumentSource.Batch.Should().Equal(__documentSource.Batch);
        decoded.MaxBatchCount.Should().Be(0);
        decoded.MaxMessageSize.Should().Be(0);
        decoded.RequestId.Should().Be(__requestId);
        decoded.Serializer.Should().BeSameAs(__serializer);
    }
}
public void WriteMessage_should_split_batches_when_maxMessageSize_is_reached(int numberOfDocuments, int maxMessageSizeMultiple, int maxMessageSizeDelta, int expectedNumberOfBatches)
{
    // NOTE(review): a method with this exact name and signature also appears later
    // in this file using the list-based BatchableSource API — presumably snippets
    // from different driver revisions; confirm only one lives in a given class.

    // Arrange: derive a max message size admitting the requested documents per batch.
    var allDocuments = new List<BsonDocument>(numberOfDocuments);
    for (var index = 0; index < numberOfDocuments; index++)
    {
        allDocuments.Add(new BsonDocument("_id", index));
    }
    var documentSize = allDocuments[0].ToBson().Length;
    var emptyMessageSize = __testMessageBytes.Length - 2 * documentSize;
    var maxMessageSize = emptyMessageSize + (maxMessageSizeMultiple * documentSize) + maxMessageSizeDelta;

    using (var enumerator = allDocuments.GetEnumerator())
    {
        var source = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __databaseName, __collectionName, __serializer, source, __maxBatchCount, maxMessageSize, __continueOnError);

        // Act: encode until exhausted, collecting each batch's documents.
        var batchCount = 0;
        var encodedDocuments = new List<BsonDocument>();
        while (source.HasMore)
        {
            using (var stream = new MemoryStream())
            using (var writer = new BsonBinaryWriter(stream))
            {
                var encoder = new InsertMessageBinaryEncoder<BsonDocument>(null, writer, __serializer);
                encoder.WriteMessage(message);
            }
            batchCount++;
            encodedDocuments.AddRange(source.Batch);
            source.ClearBatch();
        }

        // Assert: batches split as expected and no document was lost or duplicated.
        batchCount.Should().Be(expectedNumberOfBatches);
        encodedDocuments.Should().Equal(allDocuments);
    }
}
public void WriteMessage_should_split_batches_when_maxMessageSize_is_reached(int numberOfDocuments, int maxMessageSizeMultiple, int maxMessageSizeDelta, int expectedNumberOfBatches)
{
    // Arrange: derive a max message size admitting the requested documents per batch.
    var allDocuments = new List<BsonDocument>(numberOfDocuments);
    for (var index = 0; index < numberOfDocuments; index++)
    {
        allDocuments.Add(new BsonDocument("_id", index));
    }
    var documentSize = allDocuments[0].ToBson().Length;
    var emptyMessageSize = __testMessageBytes.Length - 2 * documentSize;
    var maxMessageSize = emptyMessageSize + (maxMessageSizeMultiple * documentSize) + maxMessageSizeDelta;

    var source = new BatchableSource<BsonDocument>(allDocuments, canBeSplit: true);
    var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, source, __maxBatchCount, maxMessageSize, __continueOnError);

    // Act: encode repeatedly, advancing past each processed slice of the source.
    var batchCount = 0;
    var encodedDocuments = new List<BsonDocument>();
    while (source.Count > 0)
    {
        using (var stream = new MemoryStream())
        {
            var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
            encoder.WriteMessage(message);
        }
        batchCount++;
        encodedDocuments.AddRange(source.GetProcessedItems());
        source.AdvancePastProcessedItems();
    }

    // Assert: batches split as expected and all documents were emitted in order.
    batchCount.Should().Be(expectedNumberOfBatches);
    encodedDocuments.Should().Equal(allDocuments);
}