/// <summary>
/// Opens a change stream against the test collection, inserts four documents,
/// and returns a resume token from within the stream.
/// </summary>
/// <param name="async">Whether to execute the operation asynchronously.</param>
/// <param name="shouldBeEmpty">
/// When true, iterates past all four inserted documents so that a stream resumed
/// from the returned token starts with an empty batch; when false, stops on the
/// first document so three documents remain after the token.
/// </param>
/// <returns>The resume token of the last iterated change document.</returns>
private BsonDocument GenerateResumeAfterToken(bool async, bool shouldBeEmpty = false)
{
    var emptyPipeline = new BsonDocument[0];
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var operation = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, emptyPipeline, serializer, encoderSettings)
    {
        BatchSize = 2
    };

    using (var cursor = ExecuteOperation(operation, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Insert("{ a : 1 }");
        Insert("{ b : 2 }");
        Insert("{ c : 2 }");
        Insert("{ d : 4 }");

        // Advance to the first change; when the caller wants a token that is
        // past every insert, advance through the remaining three as well.
        var moves = shouldBeEmpty ? 4 : 1;
        for (var i = 0; i < moves; i++)
        {
            enumerator.MoveNext();
        }

        return enumerator.Current.ResumeToken;
    }
}
// Verifies ChangeStreamOperation cursor.GetResumeToken() behavior when the current
// batch contains documents. Covers two moments: (1) right after the initial
// aggregate, before any iteration, where the token must equal the configured
// ResumeAfter; (2) after iterating past the first batch so a getMore has run,
// where the token source depends on server support for postBatchResumeToken.
public void GetResumeToken_should_return_expected_results_when_batch_is_not_empty_and_has_not_been_iterated(
    [Values(false, true)] bool async)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet);
    var pipeline = new BsonDocument[0];
    var resultSerializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var messageEncoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, pipeline, resultSerializer, messageEncoderSettings)
    {
        BatchSize = 2
    };
    EnsureDatabaseExists();
    DropCollection();
    // Resume from the first of the four documents inserted by the helper, so
    // the resumed stream has documents available immediately.
    subject.ResumeAfter = GenerateResumeAfterToken(async);

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        // The batch is not empty.
        // The batch hasn't been iterated at all.
        // Only the initial aggregate command has been executed.
        var resumeToken = cursor.GetResumeToken();
        resumeToken.Should().Be(subject.ResumeAfter);

        enumerator.MoveNext();
        enumerator.MoveNext(); // `aggregate` passed
        enumerator.MoveNext(); // `getMore`
        resumeToken = cursor.GetResumeToken();
        // The batch is not empty.
        // The stream has iterated beyond a previous batch and a getMore command has just been executed.
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            // Servers that report a postBatchResumeToken: the cursor must surface it.
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeToken.Should().Be(postBatchResumeToken);
        }
        else
        {
            // Older servers: fall back to the _id of the last iterated document.
            var documentResumeToken = cursor._documentResumeToken();
            documentResumeToken.Should().NotBeNull();
            resumeToken.Should().Be(documentResumeToken);
        }
    }
}
// Exercises a change stream filtered to update events and verifies the shape
// of the resulting ChangeStreamDocument for both fullDocument modes.
public void Execute_should_return_expected_results_for_updates(
    [Values(ChangeStreamFullDocumentOption.Default, ChangeStreamFullDocumentOption.UpdateLookup)] ChangeStreamFullDocumentOption fullDocument,
    [Values(false, true)] bool async)
{
    RequireServer.Check().Supports(Feature.ChangeStreamStage).ClusterTypes(ClusterType.ReplicaSet);
    var matchUpdatesStage = BsonDocument.Parse("{ $match : { operationType : \"update\" } }");
    var pipeline = new[] { matchUpdatesStage };
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, pipeline, serializer, encoderSettings)
    {
        FullDocument = fullDocument
    };
    EnsureDatabaseExists();
    DropCollection();

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Insert("{ _id : 1, x : 1 }");
        Update("{ _id : 1 }", "{ $set : { x : 2 } }");

        enumerator.MoveNext().Should().BeTrue();
        var change = enumerator.Current;

        change.OperationType.Should().Be(ChangeStreamOperationType.Update);
        change.CollectionNamespace.Should().Be(_collectionNamespace);
        change.DocumentKey.Should().Be("{ _id : 1 }");
        // FullDocument is only populated when UpdateLookup was requested.
        var expectedFullDocument = fullDocument == ChangeStreamFullDocumentOption.Default ? null : "{ _id : 1, x : 2 }";
        change.FullDocument.Should().Be(expectedFullDocument);
        change.RenameTo.Should().BeNull();
        change.ResumeToken.Should().NotBeNull();
        change.UpdateDescription.RemovedFields.Should().BeEmpty();
        change.UpdateDescription.UpdatedFields.Should().Be("{ x : 2 }");
    }
}
// Exercises a change stream filtered to insert events: the pre-existing
// document and the update must be skipped, and only the new insert surfaced.
public void Execute_should_return_expected_results_for_inserts(
    [Values(false, true)] bool async)
{
    RequireServer.Check().Supports(Feature.ChangeStreamStage).ClusterTypes(ClusterType.ReplicaSet, ClusterType.Sharded);
    var matchInsertsStage = BsonDocument.Parse("{ $match : { operationType : \"insert\" } }");
    var pipeline = new[] { matchInsertsStage };
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, pipeline, serializer, encoderSettings);
    DropCollection();
    // Inserted before the stream opens, so it must not appear in the results.
    Insert("{ _id : 1, x : 1 }");

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        // The update is filtered out by the pipeline; only the insert matches.
        Update("{ _id : 1 }", "{ $set : { x : 2 } }");
        Insert("{ _id : 2, x : 2 }");

        enumerator.MoveNext().Should().BeTrue();
        var change = enumerator.Current;

        change.OperationType.Should().Be(ChangeStreamOperationType.Insert);
        change.CollectionNamespace.Should().Be(_collectionNamespace);
        change.DocumentKey.Should().Be("{ _id : 2 }");
        change.FullDocument.Should().Be("{ _id : 2, x : 2 }");
        change.ResumeToken.Should().NotBeNull();
        change.UpdateDescription.Should().BeNull();
    }
}
// AsyncCursorEnumerator must surface every document across multiple server
// batches and then report exhaustion.
public void MoveNext_should_return_expected_result_when_there_are_two_batches()
{
    var cursorMock = new Mock<IAsyncCursor<BsonDocument>>();
    var batch1 = new[] { new BsonDocument("_id", 0), new BsonDocument("_id", 1) };
    var batch2 = new[] { new BsonDocument("_id", 2) };
    // Two successful batch fetches followed by end-of-cursor.
    cursorMock
        .SetupSequence(c => c.MoveNext(CancellationToken.None))
        .Returns(true)
        .Returns(true)
        .Returns(false);
    cursorMock
        .SetupSequence(c => c.Current)
        .Returns(batch1)
        .Returns(batch2);
    var subject = new AsyncCursorEnumerator<BsonDocument>(cursorMock.Object, CancellationToken.None);

    // Three documents in total, then the enumerator is exhausted.
    subject.MoveNext().Should().BeTrue();
    subject.MoveNext().Should().BeTrue();
    subject.MoveNext().Should().BeTrue();
    subject.MoveNext().Should().BeFalse();
}
// AsyncCursorEnumerator must surface every document across multiple server
// batches and then report exhaustion (NSubstitute variant).
public void MoveNext_should_return_expected_result_when_there_are_two_batches()
{
    var cursor = Substitute.For<IAsyncCursor<BsonDocument>>();
    var batch1 = new[] { new BsonDocument("_id", 0), new BsonDocument("_id", 1) };
    var batch2 = new[] { new BsonDocument("_id", 2) };
    // Two successful batch fetches followed by end-of-cursor.
    cursor.MoveNext().Returns(true, true, false);
    cursor.Current.Returns<IEnumerable<BsonDocument>>(batch1, batch2);
    var subject = new AsyncCursorEnumerator<BsonDocument>(cursor, CancellationToken.None);

    // Three documents in total, then the enumerator is exhausted.
    subject.MoveNext().Should().BeTrue();
    subject.MoveNext().Should().BeTrue();
    subject.MoveNext().Should().BeTrue();
    subject.MoveNext().Should().BeFalse();
}
// Verifies cursor.GetResumeToken() when the current batch holds no unconsumed
// documents. Covers two moments: (1) immediately after the initial aggregate
// returns an empty batch; (2) after the single available document has been
// iterated, so the batch is fully consumed. In both cases the expected token
// source depends on server support for postBatchResumeToken.
public void GetResumeToken_should_return_expected_results_when_batch_is_empty_or_fully_iterated(
    [Values(false, true)] bool async,
    [Values(false, true)] bool withResumeAfter)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet);
    var pipeline = new BsonDocument[0];
    var resultSerializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var messageEncoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, pipeline, resultSerializer, messageEncoderSettings)
    {
        BatchSize = 2
    };
    EnsureDatabaseExists();
    DropCollection();
    if (withResumeAfter)
    {
        // shouldBeEmpty: true => the token is past all helper-inserted documents,
        // so the resumed stream starts with an empty batch.
        subject.ResumeAfter = GenerateResumeAfterToken(async, true);
    }

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        var resumeResult = cursor.GetResumeToken();
        // the batch is empty
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            // Servers that report a postBatchResumeToken: the cursor must surface it.
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(postBatchResumeToken);
        }
        else
        {
            // Older servers: with no documents iterated, the only possible token
            // is the one the stream was opened with (or none at all).
            if (withResumeAfter)
            {
                resumeResult.Should().Be(subject.ResumeAfter);
            }
            else
            {
                resumeResult.Should().BeNull();
            }
        }

        // the batch has been iterated to the last document
        Insert("{ a : 1 }");
        enumerator.MoveNext();
        resumeResult = cursor.GetResumeToken();
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(postBatchResumeToken);
        }
        else
        {
            // Older servers: fall back to the _id of the last iterated document.
            var documentResumeToken = cursor._documentResumeToken();
            documentResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(documentResumeToken);
        }
    }
}