// Raises a CommandStartedEvent for a legacy OP_INSERT wire-protocol message by
// re-shaping it as a modern "insert" command document, and records per-request
// state (when tracking is enabled) so the matching succeeded/failed event can be
// correlated later. Documents are decoded as RawBsonDocument and disposed here.
private void ProcessInsertMessage(RequestMessage message, Queue<RequestMessage> messageQueue, ConnectionId connectionId, InsertMessageBinaryEncoder<RawBsonDocument> encoder, Stopwatch stopwatch)
{
    var commandName = "insert";
    var operationId = EventContext.OperationId;
    var requestId = message.RequestId;
    var expectedResponseType = ExpectedResponseType.None;
    int numberOfDocuments = 0;
    int gleRequestId;
    WriteConcern writeConcern;
    // Presumably a getLastError message immediately follows in the queue when a
    // write concern applies; in that case the event is attributed to the GLE's
    // request id and a GLE-style response is expected. TODO confirm against
    // TryGetWriteConcernFromGLE's contract.
    if (TryGetWriteConcernFromGLE(messageQueue, out gleRequestId, out writeConcern))
    {
        requestId = gleRequestId;
        expectedResponseType = ExpectedResponseType.GLE;
    }
    if (_startedEvent != null)
    {
        // InsertMessage is generic, and we don't know the generic type...
        // Plus, for this we really want BsonDocuments, not whatever the generic type is.
        var decodedMessage = encoder.ReadMessage();
        var documents = decodedMessage.DocumentSource.GetBatchItems();
        numberOfDocuments = documents.Count;
        try
        {
            // Shape the legacy insert like the command form: name, documents,
            // and ordered (the inverse of ContinueOnError).
            var command = new BsonDocument
            {
                { commandName, decodedMessage.CollectionNamespace.CollectionName },
                { "documents", new BsonArray(documents) },
                { "ordered", !decodedMessage.ContinueOnError }
            };
            if (writeConcern == null)
            {
                // No trailing GLE: report the insert as unacknowledged.
                command["writeConcern"] = WriteConcern.Unacknowledged.ToBsonDocument();
            }
            else if (!writeConcern.IsServerDefault)
            {
                command["writeConcern"] = writeConcern.ToBsonDocument();
            }
            var @event = new CommandStartedEvent(
                commandName,
                command,
                decodedMessage.CollectionNamespace.DatabaseNamespace,
                operationId,
                requestId,
                connectionId);
            _startedEvent(@event);
        }
        finally
        {
            // RawBsonDocument is disposable; release every decoded document even
            // if building or publishing the event throws.
            foreach (var document in documents)
            {
                document.Dispose();
            }
        }
    }
    if (_shouldTrackState)
    {
        // Keyed by requestId so the reply handler can find this entry.
        _state.TryAdd(requestId, new CommandState
        {
            CommandName = commandName,
            OperationId = operationId,
            Stopwatch = stopwatch,
            ExpectedResponseType = expectedResponseType,
            NumberOfInsertedDocuments = numberOfDocuments
        });
    }
}
// Round-trips the canonical test message through the encoder and verifies the
// produced bytes match the expected wire-format bytes exactly.
public void WriteMessage_should_write_a_message()
{
    using (var memoryStream = new MemoryStream())
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(memoryStream, __messageEncoderSettings, __serializer);

        encoder.WriteMessage(__testMessage);

        var encodedBytes = memoryStream.ToArray();
        encodedBytes.Should().Equal(__testMessageBytes);
    }
}
// Publishes a CommandStartedEvent for a legacy OP_INSERT message by re-decoding
// it as RawBsonDocuments and presenting it in the "insert" command shape, then
// stashes per-request state (when tracking is enabled) so the corresponding
// completion event can be correlated by request id.
private void ProcessInsertMessage(RequestMessage message, Queue<RequestMessage> messageQueue, ConnectionId connectionId, InsertMessageBinaryEncoder<RawBsonDocument> encoder, Stopwatch stopwatch)
{
    const string commandName = "insert";
    var operationId = EventContext.OperationId;
    var requestId = message.RequestId;
    var responseType = ExpectedResponseType.None;
    var insertedDocumentCount = 0;

    int gleRequestId;
    WriteConcern gleWriteConcern;
    if (TryGetWriteConcernFromGLE(messageQueue, out gleRequestId, out gleWriteConcern))
    {
        // A getLastError accompanies this insert: attribute the event to the
        // GLE's request id and expect a GLE-style response.
        requestId = gleRequestId;
        responseType = ExpectedResponseType.GLE;
    }

    if (_startedEvent != null)
    {
        // InsertMessage is generic and the generic argument is unknown here;
        // re-reading the message as RawBsonDocument gives us BsonDocuments to
        // put in the event regardless of the original generic type.
        var decoded = encoder.ReadMessage();
        var rawDocuments = decoded.DocumentSource.GetRemainingItems().ToList();
        insertedDocumentCount = rawDocuments.Count;
        try
        {
            var command = new BsonDocument
            {
                { commandName, decoded.CollectionNamespace.CollectionName },
                { "documents", new BsonArray(rawDocuments) },
                { "ordered", !decoded.ContinueOnError }
            };

            if (gleWriteConcern == null)
            {
                // No trailing GLE: report the insert as unacknowledged.
                command["writeConcern"] = WriteConcern.Unacknowledged.ToBsonDocument();
            }
            else if (!gleWriteConcern.IsServerDefault)
            {
                command["writeConcern"] = gleWriteConcern.ToBsonDocument();
            }

            _startedEvent(new CommandStartedEvent(
                commandName,
                command,
                decoded.CollectionNamespace.DatabaseNamespace,
                operationId,
                requestId,
                connectionId));
        }
        finally
        {
            // RawBsonDocument is disposable; release every decoded document
            // even if building or publishing the event throws.
            foreach (var rawDocument in rawDocuments)
            {
                rawDocument.Dispose();
            }
        }
    }

    if (_shouldTrackState)
    {
        _state.TryAdd(requestId, new CommandState
        {
            CommandName = commandName,
            OperationId = operationId,
            Stopwatch = stopwatch,
            ExpectedResponseType = responseType,
            NumberOfInsertedDocuments = insertedDocumentCount
        });
    }
}
// Verifies that WriteMessage splits the document stream into the expected
// number of batches when the encoded message would exceed maxMessageSize, and
// that the batches together reproduce the original documents in order.
public void WriteMessage_should_split_batches_when_maxMessageSize_is_reached(int numberOfDocuments, int maxMessageSizeMultiple, int maxMessageSizeDelta, int expectedNumberOfBatches)
{
    var allDocuments = new List<BsonDocument>(numberOfDocuments);
    for (var id = 0; id < numberOfDocuments; id++)
    {
        allDocuments.Add(new BsonDocument("_id", id));
    }

    // Derive maxMessageSize from a known baseline: the canonical test message
    // holds two documents, so subtracting them gives the empty-message size.
    var documentSize = allDocuments[0].ToBson().Length;
    var emptyMessageSize = __testMessageBytes.Length - 2 * documentSize;
    var maxMessageSize = emptyMessageSize + (maxMessageSizeMultiple * documentSize) + maxMessageSizeDelta;

    using (var enumerator = allDocuments.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, documentSource, __maxBatchCount, maxMessageSize, __continueOnError);

        var batchCount = 0;
        var writtenDocuments = new List<BsonDocument>();
        while (documentSource.HasMore)
        {
            // Each WriteMessage call consumes one batch from the source.
            using (var stream = new MemoryStream())
            {
                var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
                encoder.WriteMessage(message);
            }
            batchCount++;
            writtenDocuments.AddRange(documentSource.Batch);
            documentSource.ClearBatch();
        }

        batchCount.Should().Be(expectedNumberOfBatches);
        writtenDocuments.Should().Equal(allDocuments);
    }
}
// WriteMessage must reject a null message with ArgumentNullException.
public void WriteMessage_should_throw_if_message_is_null()
{
    using (var stream = new MemoryStream())
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        Action writeNull = () => encoder.WriteMessage(null);

        writeNull.ShouldThrow<ArgumentNullException>();
    }
}
// Verifies that the continueOnError setting is encoded as the expected value
// at the flags offset of the written message bytes.
public void WriteMessage_should_encode_flags_correctly(int flags, bool continueOnError)
{
    var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, __documentSource, __maxBatchCount, __maxMessageSize, continueOnError);

    using (var stream = new MemoryStream())
    {
        var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
        encoder.WriteMessage(message);

        var encodedBytes = stream.ToArray();
        encodedBytes[__flagsOffset].Should().Be((byte)flags);
    }
}
// Decodes the canonical test bytes and checks every observable property of the
// resulting InsertMessage; MaxBatchCount and MaxMessageSize are expected to
// decode as zero.
public void ReadMessage_should_read_a_message()
{
    using (var stream = new MemoryStream(__testMessageBytes))
    {
        var decoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        var decoded = decoder.ReadMessage();

        decoded.CollectionNamespace.Should().Be(__collectionNamespace);
        decoded.ContinueOnError.Should().Be(__continueOnError);
        decoded.DocumentSource.Batch.Should().Equal(__documentSource.Batch);
        decoded.MaxBatchCount.Should().Be(0);
        decoded.MaxMessageSize.Should().Be(0);
        decoded.RequestId.Should().Be(__requestId);
        decoded.Serializer.Should().BeSameAs(__serializer);
    }
}
// Overwrites the flags byte in a copy of the canonical test bytes and verifies
// the decoder maps it onto ContinueOnError as expected.
public void ReadMessage_should_decode_flags_correctly(int flags, bool continueOnError)
{
    var messageBytes = (byte[])__testMessageBytes.Clone();
    messageBytes[__flagsOffset] = (byte)flags;

    using (var stream = new MemoryStream(messageBytes))
    {
        var decoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);

        var decoded = decoder.ReadMessage();

        decoded.ContinueOnError.Should().Be(continueOnError);
    }
}
// An encoder constructed with only a reader (no writer) cannot write;
// WriteMessage must throw InvalidOperationException.
public void WriteMessage_should_throw_if_binaryWriter_was_not_provided()
{
    using (var stream = new MemoryStream())
    using (var binaryReader = new BsonBinaryReader(stream))
    {
        var readOnlyEncoder = new InsertMessageBinaryEncoder<BsonDocument>(binaryReader, null, __serializer);

        Action write = () => readOnlyEncoder.WriteMessage(__testMessage);

        write.ShouldThrow<InvalidOperationException>();
    }
}
// Verifies that WriteMessage caps each batch at maxBatchCount documents,
// producing the expected number of batches, and that the batches together
// reproduce the original documents in order.
public void WriteMessage_should_split_batches_when_maxBatchCount_is_reached(int numberOfDocuments, int maxBatchCount, int expectedNumberOfBatches)
{
    var allDocuments = new List<BsonDocument>(numberOfDocuments);
    for (var id = 0; id < numberOfDocuments; id++)
    {
        allDocuments.Add(new BsonDocument("_id", id));
    }

    using (var enumerator = allDocuments.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __databaseName, __collectionName, __serializer, documentSource, maxBatchCount, __maxMessageSize, __continueOnError);

        var batchCount = 0;
        var writtenDocuments = new List<BsonDocument>();
        while (documentSource.HasMore)
        {
            // Each WriteMessage call consumes one batch from the source.
            using (var stream = new MemoryStream())
            using (var binaryWriter = new BsonBinaryWriter(stream))
            {
                var encoder = new InsertMessageBinaryEncoder<BsonDocument>(null, binaryWriter, __serializer);
                encoder.WriteMessage(message);
            }
            batchCount++;
            writtenDocuments.AddRange(documentSource.Batch);
            documentSource.ClearBatch();
        }

        batchCount.Should().Be(expectedNumberOfBatches);
        writtenDocuments.Should().Equal(allDocuments);
    }
}