// Executes an opcode insert of the single test document and records the first result.
protected override void When()
{
    var source = new BatchableSource<BsonDocument>(new[] { _document });
    var insertOperation = new InsertOpcodeOperation<BsonDocument>(
        CollectionNamespace,
        source,
        BsonDocumentSerializer.Instance,
        MessageEncoderSettings);

    var results = ExecuteOperationAsync(insertOperation).GetAwaiter().GetResult();
    _result = results.First();
}
// An enumerator-based source starts with no materialized batch and reports more items.
public void Constructor_with_enumerator_argument_should_initialize_instance()
{
    var source = new List<int> { 1, 2 };

    var subject = new BatchableSource<int>(source.GetEnumerator());

    subject.Batch.Should().BeNull();
    subject.HasMore.Should().BeTrue();
}
// Creates the shared single-document source used by the tests in this fixture.
public InsertOpcodeOperationTests()
{
    var seedDocument = BsonDocument.Parse("{_id: 1, x: 1}");
    _documentSource = new BatchableSource<BsonDocument>(new[] { seedDocument });
}
// ClearBatch resets Batch to null after construction from an array.
public void ClearBatch_should_clear_batch()
{
    var subject = new BatchableSource<int>(new[] { 1, 2 });
    subject.Batch.Should().NotBeNull();

    subject.ClearBatch();

    subject.Batch.Should().BeNull();
}
// Prepares a fresh single-document source before each test.
public void SetUp()
{
    var document = BsonDocument.Parse("{_id: 1, x: 1}");
    _documentSource = new BatchableSource<BsonDocument>(new[] { document });
}
// An enumerable-based source materializes its single batch eagerly, so HasMore is false.
public void Constructor_with_enumerable_argument_should_initialize_instance()
{
    var expected = new List<int> { 1, 2 };

    var subject = new BatchableSource<int>(expected);

    subject.Batch.Should().Equal(expected);
    subject.HasMore.Should().BeFalse();
}
// EndBatch without an overflow publishes the batch and marks the source exhausted.
// Renamed from ..._set_HashMore_... to fix the "HashMore" typo in the test name.
public void EndBatch_with_no_overflow_should_set_batch_and_set_HasMore_to_false()
{
    var subject = new BatchableSource<int>(Enumerable.Empty<int>().GetEnumerator());
    subject.Batch.Should().BeNull();
    subject.HasMore.Should().BeTrue();

    var batch = new int[] { 1, 2 };
    subject.EndBatch(batch);

    subject.Batch.Should().BeSameAs(batch);
    subject.HasMore.Should().BeFalse();
}
// Builds a Type1 command message section over the given documents with test defaults
// (identifier "id", empty document array, no batch/size limits).
private Type1CommandMessageSection<BsonDocument> CreateType1Section(
    string identifier = null,
    BsonDocument[] documents = null,
    bool canBeSplit = false)
{
    var batch = new BatchableSource<BsonDocument>(documents ?? new BsonDocument[0], canBeSplit: canBeSplit);
    return new Type1CommandMessageSection<BsonDocument>(
        identifier ?? "id",
        batch,
        BsonDocumentSerializer.Instance,
        NoOpElementNameValidator.Instance,
        null,
        null);
}
// Serializes the write requests, choosing between streaming the next batch
// (Batch not yet materialized) and writing an already-materialized single batch.
public override void Serialize(BsonSerializationContext context, BatchableSource<WriteRequest> requestSource)
{
    if (requestSource.Batch is null)
    {
        SerializeNextBatch(context, requestSource);
        return;
    }

    SerializeSingleBatch(context, requestSource);
}
// Inserts the test document via the opcode operation and captures the results.
protected override void When()
{
    var source = new BatchableSource<BsonDocument>(new[] { _document });
    var operation = new InsertOpcodeOperation<BsonDocument>(
        DatabaseName,
        CollectionName,
        BsonDocumentSerializer.Instance,
        source);
    _result = ExecuteOperationAsync(operation).GetAwaiter().GetResult();
}
// constructors
/// <summary>
/// Initializes a new instance of the <see cref="RetryableInsertCommandOperation{TDocument}"/> class
/// over the given batch of documents.
/// </summary>
/// <param name="collectionNamespace">The namespace of the target collection.</param>
/// <param name="documents">The batchable source of documents to insert.</param>
/// <param name="documentSerializer">The serializer for the documents.</param>
/// <param name="messageEncoderSettings">The message encoder settings.</param>
public RetryableInsertCommandOperation(
    CollectionNamespace collectionNamespace,
    BatchableSource<TDocument> documents,
    IBsonSerializer<TDocument> documentSerializer,
    MessageEncoderSettings messageEncoderSettings)
    : base(Ensure.IsNotNull(collectionNamespace, nameof(collectionNamespace)).DatabaseNamespace, messageEncoderSettings)
{
    _collectionNamespace = Ensure.IsNotNull(collectionNamespace, nameof(collectionNamespace));
    _documentSerializer = Ensure.IsNotNull(documentSerializer, nameof(documentSerializer));
    _documents = Ensure.IsNotNull(documents, nameof(documents));
}
// Inserts one document, bounded by a 30-second timeout linked to the fixture's token.
private async Task Insert(IWriteBinding binding, BsonDocument document)
{
    var source = new BatchableSource<BsonDocument>(new[] { document });
    var operation = new InsertOpcodeOperation<BsonDocument>(_collection, source, BsonDocumentSerializer.Instance, _messageEncoderSettings);

    using (var timeoutSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
    using (var linkedSource = CancellationTokenSource.CreateLinkedTokenSource(timeoutSource.Token, _cancellationTokenSource.Token))
    {
        await operation.ExecuteAsync(binding, linkedSource.Token);
    }
}
// constructors
/// <summary>Initializes a new instance of the InsertOpcodeOperationEmulator class.</summary>
/// <param name="collectionNamespace">The collection namespace.</param>
/// <param name="serializer">The document serializer.</param>
/// <param name="documentSource">The batchable source of documents to insert.</param>
/// <param name="messageEncoderSettings">The message encoder settings; may be null.</param>
public InsertOpcodeOperationEmulator(
    CollectionNamespace collectionNamespace,
    IBsonSerializer<TDocument> serializer,
    BatchableSource<TDocument> documentSource,
    MessageEncoderSettings messageEncoderSettings)
{
    // nameof keeps the parameter names refactor-safe (was string literals; produces
    // identical strings, and matches the sibling overload that already uses nameof).
    _collectionNamespace = Ensure.IsNotNull(collectionNamespace, nameof(collectionNamespace));
    _serializer = Ensure.IsNotNull(serializer, nameof(serializer));
    _documentSource = Ensure.IsNotNull(documentSource, nameof(documentSource));
    _messageEncoderSettings = messageEncoderSettings;
}
// Runs the insert operation against the test database/collection and stores the results.
protected override void When()
{
    var singleDocument = new[] { _document };
    var documentSource = new BatchableSource<BsonDocument>(singleDocument);
    var insertOperation = new InsertOpcodeOperation<BsonDocument>(DatabaseName, CollectionName, BsonDocumentSerializer.Instance, documentSource);
    _result = ExecuteOperationAsync(insertOperation).GetAwaiter().GetResult();
}
// EndBatch with an overflow publishes the batch, keeps HasMore true, and the overflow
// is handed back by the next StartBatch call.
// Renamed from ..._set_HashMore_... to fix the "HashMore" typo in the test name.
public void EndBatch_with_overflow_should_set_batch_and_set_HasMore_to_true()
{
    var subject = new BatchableSource<int>(Enumerable.Empty<int>().GetEnumerator());
    subject.Batch.Should().BeNull();
    subject.HasMore.Should().BeTrue();

    var batch = new int[] { 1, 2 };
    var overflow = new BatchableSource<int>.Overflow { Item = 3, State = 4 };
    subject.EndBatch(batch, overflow);

    subject.Batch.Should().BeSameAs(batch);
    subject.HasMore.Should().BeTrue();
    subject.StartBatch().Should().BeSameAs(overflow);
}
// methods
/// <summary>
/// Reads an insert message from the JSON reader supplied to this encoder.
/// </summary>
/// <returns>The decoded insert message.</returns>
/// <exception cref="InvalidOperationException">No JSON reader was provided.</exception>
/// <exception cref="FormatException">The message is not an insert or its documents field is null.</exception>
public InsertMessage <TDocument> ReadMessage()
{
    if (_jsonReader == null)
    {
        throw new InvalidOperationException("No jsonReader was provided.");
    }

    // Deserialize the whole message envelope as a BsonDocument first.
    var messageContext = BsonDeserializationContext.CreateRoot <BsonDocument>(_jsonReader);
    var messageDocument = BsonDocumentSerializer.Instance.Deserialize(messageContext);

    var opcode = messageDocument["opcode"].AsString;
    if (opcode != "insert")
    {
        throw new FormatException("Opcode is not insert.");
    }

    // Pull the scalar fields out of the envelope.
    var requestId = messageDocument["requestId"].ToInt32();
    var databaseName = messageDocument["database"].AsString;
    var collectionName = messageDocument["collection"].AsString;
    var maxBatchCount = messageDocument["maxBatchCount"].ToInt32();
    var maxMessageSize = messageDocument["maxMessageSize"].ToInt32();
    var continueOnError = messageDocument["continueOnError"].ToBoolean();

    var documents = messageDocument["documents"];
    if (documents.IsBsonNull)
    {
        throw new FormatException("InsertMessageJsonEncoder requires documents to not be null.");
    }

    // Re-deserialize each embedded document with the TDocument serializer.
    var batch = new List <TDocument>();
    foreach (BsonDocument serializedDocument in documents.AsBsonArray)
    {
        using (var documentReader = new BsonDocumentReader(serializedDocument))
        {
            var documentContext = BsonDeserializationContext.CreateRoot <TDocument>(documentReader);
            var document = _serializer.Deserialize(documentContext);
            batch.Add(document);
        }
    }
    var documentSource = new BatchableSource <TDocument>(batch);

    return(new InsertMessage <TDocument>(
        requestId,
        databaseName,
        collectionName,
        _serializer,
        documentSource,
        maxBatchCount,
        maxMessageSize,
        continueOnError));
}
// constructors
// Copies all configurable state from an existing operation into the builder,
// grouped by target/source/limits for readability.
public Builder(InsertOpcodeOperation<TDocument> original)
{
    _databaseName = original._databaseName;
    _collectionName = original._collectionName;
    _documentSource = original._documentSource;
    _serializer = original._serializer;
    _continueOnError = original._continueOnError;
    _maxBatchCount = original.MaxBatchCount;
    _maxDocumentSize = original.MaxDocumentSize;
    _maxMessageSize = original.MaxMessageSize;
    _writeConcern = original.WriteConcern;
}
/// <summary>Creates the index by inserting its definition into system.indexes.</summary>
/// <param name="binding">The write binding.</param>
/// <param name="timeout">The operation timeout.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The Response of the first write concern result of the insert.</returns>
public async Task<BsonDocument> ExecuteAsync(IWriteBinding binding, TimeSpan timeout = default(TimeSpan), CancellationToken cancellationToken = default(CancellationToken))
{
    Ensure.IsNotNull(binding, nameof(binding)); // nameof is refactor-safe (was a string literal)
    var indexDocument = CreateIndexDocument();
    var documentSource = new BatchableSource<BsonDocument>(new[] { indexDocument });
    var operation = new InsertOpcodeOperation(
        _collectionNamespace.DatabaseNamespace.SystemIndexesCollection,
        documentSource,
        _messageEncoderSettings)
    {
        WriteConcern = _writeConcern
    };
    var results = await operation.ExecuteAsync(binding, timeout, cancellationToken).ConfigureAwait(false);
    return results.First().Response;
}
// After a batch has been started and ended, ClearBatch resets Batch to null.
public void ClearBatch_should_clear_batch()
{
    var source = new List<int> { 1, 2 };
    var subject = new BatchableSource<int>(source.GetEnumerator());
    subject.StartBatch();
    subject.MoveNext();
    subject.EndBatch(new[] { subject.Current });
    subject.Batch.Should().NotBeNull();

    subject.ClearBatch();

    subject.Batch.Should().BeNull();
}
// Prepares per-batch write concern handling for a multi-batch insert.
public BatchHelper(BatchableSource<TDocument> documentSource, WriteConcern writeConcern, bool continueOnError)
{
    _writeConcern = writeConcern;
    _continueOnError = continueOnError;
    _results = writeConcern.IsAcknowledged ? new List<WriteConcernResult>() : null;
    _batchWriteConcern = writeConcern;
    _shouldSendGetLastError = null;

    // Unacknowledged writes that must stop on the first error get their intermediate
    // batches acknowledged, so errors can be detected while more batches remain.
    var mustDetectErrorsBetweenBatches = !writeConcern.IsAcknowledged && !continueOnError;
    if (mustDetectErrorsBetweenBatches)
    {
        _batchWriteConcern = WriteConcern.Acknowledged;
        _shouldSendGetLastError = () => documentSource.HasMore;
    }
}
// The three-argument constructor should populate all exposed properties.
public void constructor_should_initialize_instance()
{
    var identifier = "xyz";
    var documents = new BatchableSource<BsonDocument>(new List<BsonDocument>(), canBeSplit: false);
    var serializer = new BsonDocumentSerializer();

    var result = new Type1CommandMessageSection<BsonDocument>(identifier, documents, serializer);

    result.Documents.Should().BeSameAs(documents);
    result.DocumentSerializer.Should().BeSameAs(serializer);
    result.Identifier.Should().BeSameAs(identifier);
    result.PayloadType.Should().Be(PayloadType.Type1);
}
// private methods
// Builds an opcode insert into system.indexes for the given index request.
internal InsertOpcodeOperation<BsonDocument> CreateOperation(SemanticVersion serverVersion, CreateIndexRequest createIndexRequest)
{
    var indexDocument = createIndexRequest.CreateIndexDocument(serverVersion);
    // Prepend the "ns" element identifying the target collection.
    indexDocument.InsertAt(0, new BsonElement("ns", _collectionNamespace.FullName));
    var source = new BatchableSource<BsonDocument>(new[] { indexDocument });
    return new InsertOpcodeOperation<BsonDocument>(
        _collectionNamespace.DatabaseNamespace.SystemIndexesCollection,
        source,
        BsonDocumentSerializer.Instance,
        _messageEncoderSettings);
}
// Creates each requested index by inserting its definition into system.indexes, one at a time.
private async Task<BsonDocument> ExecuteUsingInsertAsync(IChannelSourceHandle channelSource, CancellationToken cancellationToken)
{
    var systemIndexes = _collectionNamespace.DatabaseNamespace.SystemIndexesCollection;
    foreach (var request in _requests)
    {
        var indexDocument = request.CreateIndexDocument();
        // Prepend the "ns" element identifying the target collection.
        indexDocument.InsertAt(0, new BsonElement("ns", _collectionNamespace.FullName));
        var source = new BatchableSource<BsonDocument>(new[] { indexDocument });
        var operation = new InsertOpcodeOperation(systemIndexes, source, _messageEncoderSettings);
        await operation.ExecuteAsync(channelSource, cancellationToken).ConfigureAwait(false);
    }
    return new BsonDocument("ok", 1);
}
// Serializes an already-materialized batch (requestSource.Batch is non-null) that
// must fit in a single write command because it cannot be split.
private void SerializeSingleBatch(BsonSerializationContext context, BatchableSource <WriteRequest> requestSource)
{
    _batchStartPosition = (int)context.Writer.Position;
    foreach (var request in requestSource.Batch)
    {
        AddRequest(context, request);
        // A batch of one is always allowed even if it exceeds the limits; only fail
        // once a second request pushes the batch over a limit.
        if ((_batchCount > _maxBatchCount || _batchLength > _maxBatchLength) && _batchCount > 1)
        {
            throw new ArgumentException("The non-batchable requests do not fit in a single write command.");
        }
    }
}
// methods
// Wraps a single insert request in the opcode-based wire protocol.
protected override IWireProtocol<WriteConcernResult> CreateProtocol(IConnectionHandle connection, WriteRequest request)
{
    var document = ((InsertRequest)request).Document;
    var source = new BatchableSource<BsonDocument>(new[] { document });
    return new InsertWireProtocol<BsonDocument>(
        CollectionNamespace,
        WriteConcern,
        BsonDocumentSerializer.Instance,
        MessageEncoderSettings,
        source,
        connection.Description.MaxBatchCount,
        connection.Description.MaxMessageSize,
        continueOnError: false);
}
// methods
// Wraps the request's document (via BsonDocumentWrapper, preserving its own serializer)
// in a single-document insert wire protocol.
protected override IWireProtocol<BsonDocument> CreateProtocol(IConnectionHandle connection, WriteRequest request)
{
    var insertRequest = (InsertRequest)request;
    var wrappedDocument = new BsonDocumentWrapper(insertRequest.Document, insertRequest.Serializer);
    var source = new BatchableSource<BsonDocument>(new[] { wrappedDocument });
    return new InsertWireProtocol<BsonDocument>(
        DatabaseName,
        CollectionName,
        WriteConcern,
        BsonDocumentSerializer.Instance,
        source,
        connection.Description.MaxBatchCount,
        connection.Description.MaxMessageSize,
        continueOnError: false);
}
// methods
// Builds a single-document insert wire protocol sized to the connection's limits.
protected override IWireProtocol<WriteConcernResult> CreateProtocol(IConnectionHandle connection, WriteRequest request)
{
    var insertRequest = (InsertRequest)request;
    var documentBatch = new BatchableSource<BsonDocument>(new[] { insertRequest.Document });
    var description = connection.Description;
    return new InsertWireProtocol<BsonDocument>(
        CollectionNamespace,
        WriteConcern,
        BsonDocumentSerializer.Instance,
        MessageEncoderSettings,
        documentBatch,
        description.MaxBatchCount,
        description.MaxMessageSize,
        continueOnError: false);
}
// methods
// Builds an insert wire protocol for one wrapped document, sized to the connection's limits.
protected override IWireProtocol<BsonDocument> CreateProtocol(IConnectionHandle connection, WriteRequest request)
{
    var insertRequest = (InsertRequest)request;
    var documentBatch = new BatchableSource<BsonDocument>(
        new[] { new BsonDocumentWrapper(insertRequest.Document, insertRequest.Serializer) });
    var description = connection.Description;
    return new InsertWireProtocol<BsonDocument>(
        DatabaseName,
        CollectionName,
        WriteConcern,
        BsonDocumentSerializer.Instance,
        documentBatch,
        description.MaxBatchCount,
        description.MaxMessageSize,
        continueOnError: false);
}
// constructors
// Validates arguments and rejects batches containing any null documents.
public InsertOpcodeOperationEmulator(
    CollectionNamespace collectionNamespace,
    IBsonSerializer<TDocument> serializer,
    BatchableSource<TDocument> documentSource,
    MessageEncoderSettings messageEncoderSettings)
{
    _collectionNamespace = Ensure.IsNotNull(collectionNamespace, nameof(collectionNamespace));
    _serializer = Ensure.IsNotNull(serializer, nameof(serializer));
    _documentSource = Ensure.IsNotNull(documentSource, nameof(documentSource));
    _messageEncoderSettings = messageEncoderSettings;

    // Only the active window of the source (Offset..Offset+Count) is checked.
    var activeItems = documentSource.Items.Skip(documentSource.Offset).Take(documentSource.Count);
    if (activeItems.Any(d => d == null))
    {
        throw new ArgumentException("Batch contains one or more null documents.");
    }
}
// Sends a single insert request via the opcode-based channel API.
protected override Task<WriteConcernResult> ExecuteProtocolAsync(IChannelHandle channel, InsertRequest request, CancellationToken cancellationToken)
{
    var singleDocumentSource = new BatchableSource<BsonDocument>(new[] { request.Document });
    return channel.InsertAsync(
        CollectionNamespace,
        WriteConcern,
        BsonDocumentSerializer.Instance,
        MessageEncoderSettings,
        singleDocumentSource,
        MaxBatchCount,
        MaxBatchLength,
        !IsOrdered, // continueOnError
        null, // shouldSendGetLastError
        cancellationToken);
}
// constructors
/// <summary>Initializes an insert wire protocol for the given database/collection.</summary>
/// <param name="databaseName">The database name.</param>
/// <param name="collectionName">The collection name.</param>
/// <param name="writeConcern">The write concern.</param>
/// <param name="serializer">The document serializer.</param>
/// <param name="documentSource">The batchable source of documents.</param>
/// <param name="maxBatchCount">The maximum batch count; null or >= 0.</param>
/// <param name="maxMessageSize">The maximum message size; null or >= 0.</param>
/// <param name="continueOnError">The continue-on-error flag (stored as-is).</param>
public InsertWireProtocol(
    string databaseName,
    string collectionName,
    WriteConcern writeConcern,
    IBsonSerializer<TDocument> serializer,
    BatchableSource<TDocument> documentSource,
    int? maxBatchCount,
    int? maxMessageSize,
    bool continueOnError)
    : base(databaseName, collectionName, writeConcern)
{
    // nameof keeps the argument names refactor-safe (was string literals).
    _serializer = Ensure.IsNotNull(serializer, nameof(serializer));
    _documentSource = Ensure.IsNotNull(documentSource, nameof(documentSource));
    _maxBatchCount = Ensure.IsNullOrGreaterThanOrEqualToZero(maxBatchCount, nameof(maxBatchCount));
    _maxMessageSize = Ensure.IsNullOrGreaterThanOrEqualToZero(maxMessageSize, nameof(maxMessageSize));
    _continueOnError = continueOnError;
}
// constructors
/// <summary>Initializes an insert message targeting a collection namespace.</summary>
/// <param name="requestId">The request id.</param>
/// <param name="collectionNamespace">The collection namespace.</param>
/// <param name="serializer">The document serializer.</param>
/// <param name="documentSource">The batchable source of documents.</param>
/// <param name="maxBatchCount">The maximum batch count; must be greater than zero.</param>
/// <param name="maxMessageSize">The maximum message size; must be greater than zero.</param>
/// <param name="continueOnError">The continue-on-error flag (stored as-is).</param>
public InsertMessage(
    int requestId,
    CollectionNamespace collectionNamespace,
    IBsonSerializer<TDocument> serializer,
    BatchableSource<TDocument> documentSource,
    int maxBatchCount,
    int maxMessageSize,
    bool continueOnError)
    : base(requestId)
{
    // nameof keeps the argument names refactor-safe (was string literals).
    _collectionNamespace = Ensure.IsNotNull(collectionNamespace, nameof(collectionNamespace));
    _serializer = Ensure.IsNotNull(serializer, nameof(serializer));
    _documentSource = Ensure.IsNotNull(documentSource, nameof(documentSource));
    _maxBatchCount = Ensure.IsGreaterThanZero(maxBatchCount, nameof(maxBatchCount));
    _maxMessageSize = Ensure.IsGreaterThanZero(maxMessageSize, nameof(maxMessageSize));
    _continueOnError = continueOnError;
}
// methods
// Executes a single insert request over the channel using the opcode protocol.
protected override Task<WriteConcernResult> ExecuteProtocolAsync(IChannelHandle channel, WriteRequest request, CancellationToken cancellationToken)
{
    var document = ((InsertRequest)request).Document;
    var source = new BatchableSource<BsonDocument>(new[] { document });
    return channel.InsertAsync(
        CollectionNamespace,
        WriteConcern,
        BsonDocumentSerializer.Instance,
        MessageEncoderSettings,
        source,
        MaxBatchCount,
        MaxBatchLength,
        !IsOrdered, // continueOnError
        null, // shouldSendGetLastError
        cancellationToken);
}
// Let should round-trip through the property, including the null case.
public void Let_get_and_set_should_work([Values(null, "{ name : 'name' }")] string let)
{
    var requests = new List<DeleteRequest>
    {
        new DeleteRequest(new BsonDocument("x", 1)) { Hint = new BsonDocument("_id", 1) }
    };
    var batch = new BatchableSource<DeleteRequest>(requests);
    var subject = new RetryableDeleteCommandOperation(_collectionNamespace, batch, _messageEncoderSettings);
    var value = let == null ? null : BsonDocument.Parse(let);

    subject.Let = value;
    var result = subject.Let;

    result.Should().Be(value);
}
/// <inheritdoc />
protected override IEnumerable<Type1CommandMessageSection> CreateCommandPayloads(IChannelHandle channel, int attempt)
{
    // First attempt sends the original source; retries rebuild a source over the same
    // items/offset/ProcessedCount that is marked non-splittable.
    var deletes = attempt == 1
        ? _deletes
        : new BatchableSource<DeleteRequest>(_deletes.Items, _deletes.Offset, _deletes.ProcessedCount, canBeSplit: false);

    var maxBatchCount = Math.Min(MaxBatchCount ?? int.MaxValue, channel.ConnectionDescription.MaxBatchCount);
    var maxDocumentSize = channel.ConnectionDescription.MaxWireDocumentSize;
    var payload = new Type1CommandMessageSection<DeleteRequest>(
        "deletes",
        deletes,
        DeleteRequestSerializer.Instance,
        NoOpElementNameValidator.Instance,
        maxBatchCount,
        maxDocumentSize);
    return new Type1CommandMessageSection[] { payload };
}
// constructors
/// <summary>Initializes an insert wire protocol targeting a collection namespace.</summary>
/// <param name="collectionNamespace">The collection namespace.</param>
/// <param name="writeConcern">The write concern.</param>
/// <param name="serializer">The document serializer.</param>
/// <param name="messageEncoderSettings">The message encoder settings.</param>
/// <param name="documentSource">The batchable source of documents.</param>
/// <param name="maxBatchCount">The maximum batch count; null or greater than zero.</param>
/// <param name="maxMessageSize">The maximum message size; null or greater than zero.</param>
/// <param name="continueOnError">The continue-on-error flag (stored as-is).</param>
/// <param name="shouldSendGetLastError">Optional callback deciding whether to send getLastError.</param>
public InsertWireProtocol(
    CollectionNamespace collectionNamespace,
    WriteConcern writeConcern,
    IBsonSerializer<TDocument> serializer,
    MessageEncoderSettings messageEncoderSettings,
    BatchableSource<TDocument> documentSource,
    int? maxBatchCount,
    int? maxMessageSize,
    bool continueOnError,
    Func<bool> shouldSendGetLastError = null)
    : base(collectionNamespace, messageEncoderSettings, writeConcern, shouldSendGetLastError)
{
    // nameof keeps the argument names refactor-safe (was string literals).
    _serializer = Ensure.IsNotNull(serializer, nameof(serializer));
    _documentSource = Ensure.IsNotNull(documentSource, nameof(documentSource));
    _maxBatchCount = Ensure.IsNullOrGreaterThanZero(maxBatchCount, nameof(maxBatchCount));
    _maxMessageSize = Ensure.IsNullOrGreaterThanZero(maxMessageSize, nameof(maxMessageSize));
    _continueOnError = continueOnError;
}
/// <summary>
/// Reads an OP_INSERT message from the binary reader supplied to this encoder.
/// </summary>
/// <returns>The decoded insert message.</returns>
/// <exception cref="InvalidOperationException">No binary reader was provided.</exception>
public InsertMessage <TDocument> ReadMessage()
{
    if (_binaryReader == null)
    {
        throw new InvalidOperationException("No binaryReader was provided.");
    }

    var streamReader = _binaryReader.StreamReader;
    var startPosition = streamReader.Position;

    // Standard message header followed by the insert-specific fields.
    var messageSize = streamReader.ReadInt32();
    var requestId = streamReader.ReadInt32();
    var responseTo = streamReader.ReadInt32();
    var opcode = (Opcode)streamReader.ReadInt32();
    var flags = (InsertFlags)streamReader.ReadInt32();
    var fullCollectionName = streamReader.ReadCString();

    // Documents occupy the rest of the message; read until messageSize is consumed.
    var documents = new List <TDocument>();
    while (streamReader.Position < startPosition + messageSize)
    {
        var context = BsonDeserializationContext.CreateRoot <TDocument>(_binaryReader);
        var document = _serializer.Deserialize(context);
        documents.Add(document);
    }

    // Split "database.collection" at the first dot.
    var firstDot = fullCollectionName.IndexOf('.');
    var databaseName = fullCollectionName.Substring(0, firstDot);
    var collectionName = fullCollectionName.Substring(firstDot + 1);

    var documentSource = new BatchableSource <TDocument>(documents);
    // Batch limits are not encoded on the wire, so they are zero after a round trip.
    var maxBatchCount = 0;
    var maxMessageSize = 0;
    var continueOnError = flags.HasFlag(InsertFlags.ContinueOnError);

    return(new InsertMessage <TDocument>(
        requestId,
        databaseName,
        collectionName,
        _serializer,
        documentSource,
        maxBatchCount,
        maxMessageSize,
        continueOnError));
}
// Writes the next batch of documents, honoring MaxBatchCount/MaxMessageSize and
// recording any overflow document so the following batch can resume with it.
private void WriteNextBatch(State state)
{
    var batch = new List <TDocument>();

    var message = state.Message;
    var documentSource = message.DocumentSource;

    // A previous batch may have ended one document too far; start this batch with it,
    // reusing the already-serialized bytes stashed in the overflow state.
    var overflow = documentSource.StartBatch();
    if (overflow != null)
    {
        batch.Add(overflow.Item);
        AddDocument(state, (byte[])overflow.State);
    }

    // always go one document too far so that we can detect when the documentSource runs out of documents
    while (documentSource.MoveNext())
    {
        var document = documentSource.Current;
        if (document == null)
        {
            throw new ArgumentException("Batch contains one or more null documents.");
        }

        var binaryWriter = state.BinaryWriter;
        var stream = binaryWriter.BsonStream;
        var documentStartPosition = (int)stream.Position;
        AddDocument(state, document);

        // A single document may exceed the limits; only split once at least two are present.
        if ((state.BatchCount > message.MaxBatchCount || state.MessageSize > message.MaxMessageSize) && state.BatchCount > 1)
        {
            // Back the last document out of the stream and keep its serialized bytes
            // as overflow state so it need not be serialized again for the next batch.
            var serializedDocument = RemoveLastDocument(state, documentStartPosition);
            overflow = new BatchableSource <TDocument> .Overflow { Item = document, State = serializedDocument };
            documentSource.EndBatch(batch, overflow);
            return;
        }

        batch.Add(document);
    }

    documentSource.EndBatch(batch);
}
// The full constructor should populate every property, including the optional limits.
public void constructor_should_initialize_instance(
    [Values(null, 1, 2)] int? maxBatchCount,
    [Values(null, 3, 4)] int? maxDocumentSize)
{
    var identifier = "xyz";
    var documents = new BatchableSource<BsonDocument>(new List<BsonDocument>(), canBeSplit: false);
    var serializer = new BsonDocumentSerializer();
    var validator = Mock.Of<IElementNameValidator>();

    var result = new Type1CommandMessageSection<BsonDocument>(identifier, documents, serializer, validator, maxBatchCount, maxDocumentSize);

    result.Documents.Should().BeSameAs(documents);
    result.DocumentSerializer.Should().BeSameAs(serializer);
    result.ElementNameValidator.Should().BeSameAs(validator);
    result.Identifier.Should().BeSameAs(identifier);
    result.MaxBatchCount.Should().Be(maxBatchCount);
    result.MaxDocumentSize.Should().Be(maxDocumentSize);
    result.PayloadType.Should().Be(PayloadType.Type1);
}
// Four documents inserted through one operation should all land in the collection.
public async Task ExecuteAsync_should_insert_multiple_documents()
{
    var documents = new[]
    {
        BsonDocument.Parse("{_id: 1, x: 1}"),
        BsonDocument.Parse("{_id: 2, x: 2}"),
        BsonDocument.Parse("{_id: 3, x: 3}"),
        BsonDocument.Parse("{_id: 4, x: 4}"),
    };
    var documentSource = new BatchableSource<BsonDocument>(documents);
    var subject = new InsertOpcodeOperation<BsonDocument>(_collectionNamespace, documentSource, BsonDocumentSerializer.Instance, _messageEncoderSettings);

    var result = await ExecuteOperationAsync(subject);
    result.Should().HaveCount(1);

    var list = await ReadAllFromCollectionAsync();
    list.Should().HaveCount(4);
}
// constructors
/// <summary>Initializes an insert message targeting a database/collection by name.</summary>
/// <param name="requestId">The request id.</param>
/// <param name="databaseName">The database name; must be non-empty.</param>
/// <param name="collectionName">The collection name; must be non-empty.</param>
/// <param name="serializer">The document serializer.</param>
/// <param name="documentSource">The batchable source of documents.</param>
/// <param name="maxBatchCount">The maximum batch count; must be &gt;= 0.</param>
/// <param name="maxMessageSize">The maximum message size; must be &gt;= 0.</param>
/// <param name="continueOnError">The continue-on-error flag (stored as-is).</param>
public InsertMessage(
    int requestId,
    string databaseName,
    string collectionName,
    IBsonSerializer<TDocument> serializer,
    BatchableSource<TDocument> documentSource,
    int maxBatchCount,
    int maxMessageSize,
    bool continueOnError)
    : base(requestId)
{
    // nameof keeps the argument names refactor-safe (was string literals).
    _databaseName = Ensure.IsNotNullOrEmpty(databaseName, nameof(databaseName));
    _collectionName = Ensure.IsNotNullOrEmpty(collectionName, nameof(collectionName));
    _serializer = Ensure.IsNotNull(serializer, nameof(serializer));
    _documentSource = Ensure.IsNotNull(documentSource, nameof(documentSource));
    _maxBatchCount = Ensure.IsGreaterThanOrEqualToZero(maxBatchCount, nameof(maxBatchCount));
    _maxMessageSize = Ensure.IsGreaterThanOrEqualToZero(maxMessageSize, nameof(maxMessageSize));
    _continueOnError = continueOnError;
}
/// <inheritdoc />
protected override IEnumerable<Type1CommandMessageSection> CreateCommandPayloads(IChannelHandle channel, int attempt)
{
    // First attempt sends the original source; retries rebuild a source over the same
    // items/offset/ProcessedCount that is marked non-splittable.
    var documents = attempt == 1
        ? _documents
        : new BatchableSource<TDocument>(_documents.Items, _documents.Offset, _documents.ProcessedCount, canBeSplit: false);

    // Inserts into system.indexes skip the usual element name validation.
    var isSystemIndexesCollection = _collectionNamespace.Equals(CollectionNamespace.DatabaseNamespace.SystemIndexesCollection);
    var elementNameValidator = isSystemIndexesCollection
        ? (IElementNameValidator)NoOpElementNameValidator.Instance
        : CollectionElementNameValidator.Instance;

    var maxBatchCount = Math.Min(MaxBatchCount ?? int.MaxValue, channel.ConnectionDescription.MaxBatchCount);
    var maxDocumentSize = channel.ConnectionDescription.MaxDocumentSize;
    var payload = new Type1CommandMessageSection<TDocument>(
        "documents",
        documents,
        _documentSerializer,
        elementNameValidator,
        maxBatchCount,
        maxDocumentSize);
    return new Type1CommandMessageSection[] { payload };
}
// Verifies behavior when an update carries a hint: the expected outcome depends on the
// server version's hint support and on whether the write concern is acknowledged.
public void Execute_with_hint_should_throw_when_hint_is_not_supported(
    [Values(0, 1)] int w,
    [Values(false, true)] bool async)
{
    var writeConcern = new WriteConcern(w);
    var serverVersion = CoreTestConfiguration.ServerVersion;
    var requests = new List <UpdateRequest>
    {
        new UpdateRequest(
            UpdateType.Update,
            new BsonDocument("x", 1),
            new BsonDocument("$set", new BsonDocument("x", 2)))
        {
            Hint = new BsonDocument("_id", 1)
        }
    };
    var batch = new BatchableSource <UpdateRequest>(requests);
    var subject = new RetryableUpdateCommandOperation(_collectionNamespace, batch, _messageEncoderSettings)
    {
        WriteConcern = writeConcern
    };

    var exception = Record.Exception(() => ExecuteOperation(subject, async, useImplicitSession: true));

    if (Feature.HintForUpdateAndReplaceOperations.IsSupported(serverVersion))
    {
        // Hint is supported: the operation succeeds.
        exception.Should().BeNull();
    }
    else if (!writeConcern.IsAcknowledged)
    {
        // Unacknowledged writes cannot surface a server error, so the driver must throw.
        exception.Should().BeOfType <NotSupportedException>();
    }
    else if (Feature.HintForUpdateAndReplaceOperations.DriverMustThrowIfNotSupported(serverVersion))
    {
        // Versions where the driver is required to refuse the hint up front.
        exception.Should().BeOfType <NotSupportedException>();
    }
    else
    {
        // Otherwise the server itself rejects the command.
        exception.Should().BeOfType <MongoCommandException>();
    }
}
// Writes messages repeatedly from one enumerator-backed source and asserts that the
// documents are split into the expected number of batches by maxBatchCount.
public void WriteMessage_should_split_batches_when_maxBatchCount_is_reached(int numberOfDocuments, int maxBatchCount, int expectedNumberOfBatches)
{
    var documents = new List<BsonDocument>(numberOfDocuments);
    for (var i = 0; i < numberOfDocuments; i++)
    {
        documents.Add(new BsonDocument("_id", i));
    }

    using (var enumerator = documents.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __databaseName, __collectionName, __serializer, documentSource, maxBatchCount, __maxMessageSize, __continueOnError);

        var numberOfBatches = 0;
        var batchedDocuments = new List<BsonDocument>();
        // Each WriteMessage call consumes one batch from the source.
        while (documentSource.HasMore)
        {
            using (var stream = new MemoryStream())
            using (var binaryWriter = new BsonBinaryWriter(stream))
            {
                var subject = new InsertMessageBinaryEncoder<BsonDocument>(null, binaryWriter, __serializer);
                subject.WriteMessage(message);
            }
            numberOfBatches++;
            batchedDocuments.AddRange(documentSource.Batch);
            // ClearBatch lets the source advance to the next batch on the next write.
            documentSource.ClearBatch();
        }

        numberOfBatches.Should().Be(expectedNumberOfBatches);
        // Every document appears exactly once, in order, across all batches.
        batchedDocuments.Should().Equal(documents);
    }
}
// Inserting four documents (sync or async) should yield one result and four stored documents.
public void Execute_should_insert_multiple_documents(
    [Values(false, true)] bool async)
{
    RequireServer.Check();
    DropCollection();
    var documents = new[]
    {
        BsonDocument.Parse("{_id: 1, x: 1}"),
        BsonDocument.Parse("{_id: 2, x: 2}"),
        BsonDocument.Parse("{_id: 3, x: 3}"),
        BsonDocument.Parse("{_id: 4, x: 4}"),
    };
    var documentSource = new BatchableSource<BsonDocument>(documents);
    var subject = new InsertOpcodeOperation<BsonDocument>(_collectionNamespace, documentSource, BsonDocumentSerializer.Instance, _messageEncoderSettings);

    var result = ExecuteOperation(subject, async);
    result.Should().HaveCount(1);

    var list = ReadAllFromCollection(async);
    list.Should().HaveCount(4);
}
// StartBatch hands back a pending overflow exactly once.
public void StartBatch_should_return_and_clear_any_overflow()
{
    var subject = new BatchableSource<int>(Enumerable.Empty<int>().GetEnumerator());
    var overflow = new BatchableSource<int>.Overflow { Item = 1, State = null };
    subject.EndBatch(new int[0], overflow);
    subject.ClearBatch();

    subject.StartBatch().Should().BeSameAs(overflow);
    // A second call finds the overflow already cleared.
    subject.StartBatch().Should().BeNull();
}
// The enumerator protocol (MoveNext/Current) should yield every item in order.
public void MoveNext_and_Current_should_enumerate_the_items()
{
    var expected = new List<int> { 1, 2 };
    var subject = new BatchableSource<int>(expected.GetEnumerator());

    var actual = new List<int>();
    while (subject.MoveNext())
    {
        actual.Add(subject.Current);
    }

    actual.Should().Equal(expected);
}
// Inserts one document through a fresh opcode operation and returns the pending task.
private static Task Insert(IWriteBinding binding, BsonDocument document)
{
    var source = new BatchableSource<BsonDocument>(new[] { document });
    var operation = new InsertOpcodeOperation<BsonDocument>(__collection, source, BsonDocumentSerializer.Instance, __messageEncoderSettings);
    return operation.ExecuteAsync(binding);
}
// Derives a maxMessageSize from the per-document size plus fixed message overhead, then
// asserts that WriteMessage splits the documents into the expected number of batches.
public void WriteMessage_should_split_batches_when_maxMessageSize_is_reached(int numberOfDocuments, int maxMessageSizeMultiple, int maxMessageSizeDelta, int expectedNumberOfBatches)
{
    var documents = new List<BsonDocument>(numberOfDocuments);
    for (var i = 0; i < numberOfDocuments; i++)
    {
        documents.Add(new BsonDocument("_id", i));
    }
    // All documents have the same shape, so one serialized length applies to all of them.
    var documentSize = documents[0].ToBson().Length;
    // __testMessageBytes contains two documents; subtracting both leaves the fixed overhead.
    var messageSizeWithZeroDocuments = __testMessageBytes.Length - 2 * documentSize;
    var maxMessageSize = messageSizeWithZeroDocuments + (maxMessageSizeMultiple * documentSize) + maxMessageSizeDelta;

    using (var enumerator = documents.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, documentSource, __maxBatchCount, maxMessageSize, __continueOnError);

        var numberOfBatches = 0;
        var batchedDocuments = new List<BsonDocument>();
        // Each WriteMessage call consumes one batch from the source.
        while (documentSource.HasMore)
        {
            using (var stream = new MemoryStream())
            {
                var subject = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
                subject.WriteMessage(message);
            }
            numberOfBatches++;
            batchedDocuments.AddRange(documentSource.Batch);
            // ClearBatch lets the source advance to the next batch on the next write.
            documentSource.ClearBatch();
        }

        numberOfBatches.Should().Be(expectedNumberOfBatches);
        // Every document appears exactly once, in order, across all batches.
        batchedDocuments.Should().Equal(documents);
    }
}
// Executes a single-document insert, bounded by a 30s timeout and the fixture's token.
private async Task Insert(IWriteBinding binding, BsonDocument document)
{
    var documentSource = new BatchableSource<BsonDocument>(new[] { document });
    var insertOperation = new InsertOpcodeOperation<BsonDocument>(_collection, documentSource, BsonDocumentSerializer.Instance, _messageEncoderSettings);
    using (var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
    using (var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(timeoutCts.Token, _cancellationTokenSource.Token))
    {
        await insertOperation.ExecuteAsync(binding, linkedCts.Token);
    }
}