// Verifies that the database-scoped constructor stores its arguments unchanged and
// leaves every optional setting at its documented default.
public void constructor_with_database_should_initialize_instance()
{
    var databaseNamespace = new DatabaseNamespace("foo");
    var stages = new List<BsonDocument> { BsonDocument.Parse("{ $match : { operationType : \"insert\" } }") };
    var serializer = BsonDocumentSerializer.Instance;
    var encoderSettings = new MessageEncoderSettings();

    var subject = new ChangeStreamOperation<BsonDocument>(databaseNamespace, stages, serializer, encoderSettings);

    // Constructor arguments are stored as-is.
    subject.DatabaseNamespace.Should().Be(databaseNamespace);
    subject.CollectionNamespace.Should().BeNull(); // database-scoped stream has no collection
    subject.Pipeline.Should().Equal(stages);
    subject.ResultSerializer.Should().BeSameAs(serializer);
    subject.MessageEncoderSettings.Should().BeSameAs(encoderSettings);

    // All optional settings default to "not set".
    subject.BatchSize.Should().NotHaveValue();
    subject.Collation.Should().BeNull();
    subject.FullDocument.Should().Be(ChangeStreamFullDocumentOption.Default);
    subject.MaxAwaitTime.Should().NotHaveValue();
    subject.ReadConcern.Should().Be(ReadConcern.Default);
    subject.ResumeAfter.Should().BeNull();
    subject.RetryRequested.Should().BeFalse();
    subject.StartAfter.Should().BeNull();
    subject.StartAtOperationTime.Should().BeNull();
}
// Integration test: an update on the watched collection must surface as an "update"
// change event with the expected document key and update description; the full
// document is only populated when UpdateLookup is requested.
public void Execute_should_return_expected_results_for_updates(
    [Values(ChangeStreamFullDocumentOption.Default, ChangeStreamFullDocumentOption.UpdateLookup)] ChangeStreamFullDocumentOption fullDocument,
    [Values(false, true)] bool async)
{
    RequireServer.Check().Supports(Feature.ChangeStreamStage).ClusterTypes(ClusterType.ReplicaSet);
    var stages = new[] { BsonDocument.Parse("{ $match : { operationType : \"update\" } }") };
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, stages, serializer, encoderSettings)
    {
        FullDocument = fullDocument
    };
    EnsureDatabaseExists();
    DropCollection();

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Insert("{ _id : 1, x : 1 }");
        Update("{ _id : 1 }", "{ $set : { x : 2 } }");

        enumerator.MoveNext().Should().BeTrue();
        var change = enumerator.Current;
        change.OperationType.Should().Be(ChangeStreamOperationType.Update);
        change.CollectionNamespace.Should().Be(_collectionNamespace);
        change.DocumentKey.Should().Be("{ _id : 1 }");
        // With the Default option no fullDocument is returned for updates.
        change.FullDocument.Should().Be(fullDocument == ChangeStreamFullDocumentOption.Default ? null : "{ _id : 1, x : 2 }");
        change.RenameTo.Should().BeNull();
        change.ResumeToken.Should().NotBeNull();
        change.UpdateDescription.RemovedFields.Should().BeEmpty();
        change.UpdateDescription.UpdatedFields.Should().Be("{ x : 2 }");
    }
}
// Integration test: with a $match on "insert", the first reported change is the insert
// made after the stream was opened — the pre-existing document and the update are
// filtered out.
public void Execute_should_return_expected_results_for_inserts(
    [Values(false, true)] bool async)
{
    RequireServer.Check().Supports(Feature.ChangeStreamStage).ClusterTypes(ClusterType.ReplicaSet, ClusterType.Sharded);
    var stages = new[] { BsonDocument.Parse("{ $match : { operationType : \"insert\" } }") };
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, stages, serializer, encoderSettings);
    DropCollection();
    Insert("{ _id : 1, x : 1 }"); // inserted before the stream opens; must not be reported

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Update("{ _id : 1 }", "{ $set : { x : 2 } }"); // filtered out by the $match stage
        Insert("{ _id : 2, x : 2 }");

        enumerator.MoveNext().Should().BeTrue();
        var change = enumerator.Current;
        change.OperationType.Should().Be(ChangeStreamOperationType.Insert);
        change.CollectionNamespace.Should().Be(_collectionNamespace);
        change.DocumentKey.Should().Be("{ _id : 2 }");
        change.FullDocument.Should().Be("{ _id : 2, x : 2 }");
        change.ResumeToken.Should().NotBeNull();
        change.UpdateDescription.Should().BeNull();
    }
}
// When not resuming (argument false), CreateChangeStreamStage must emit the
// user-supplied resumeAfter / startAfter / startAtOperationTime verbatim instead of
// computing effective resume options from the document resume token.
public void ChangeStreamOperation_should_not_calculate_effective_options_for_non_resume_process()
{
    var emptyPipeline = new BsonDocument[0];
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var resumeAfter = new BsonDocument("a", 1);
    var startAfter = new BsonDocument("b", 2);
    var startAtOperationTime = BsonTimestamp.Create(3L);
    var documentResumeToken = new BsonDocument("c", 3);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, emptyPipeline, serializer, encoderSettings)
    {
        ResumeAfter = resumeAfter,
        StartAfter = startAfter,
        StartAtOperationTime = startAtOperationTime,
        DocumentResumeToken = documentResumeToken
    };

    var stage = subject.CreateChangeStreamStage(false);

    var changeStream = stage.GetValue("$changeStream").AsBsonDocument;
    changeStream.GetValue("resumeAfter").Should().Be(resumeAfter);
    changeStream.GetValue("startAfter").Should().Be(startAfter);
    changeStream.GetValue("startAtOperationTime").Should().Be(startAtOperationTime);
}
// Runs a throwaway change stream (batch size 2) over four inserts and returns a resume
// token from it: the token of the first change by default, or — when shouldBeEmpty is
// true — the token of the last change, so that resuming from it yields no new changes.
private BsonDocument GenerateResumeAfterToken(bool async, bool shouldBeEmpty = false)
{
    var emptyPipeline = new BsonDocument[0];
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, emptyPipeline, serializer, encoderSettings)
    {
        BatchSize = 2
    };

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Insert("{ a : 1 }");
        Insert("{ b : 2 }");
        Insert("{ c : 2 }");
        Insert("{ d : 4 }");

        enumerator.MoveNext();
        if (shouldBeEmpty)
        {
            // Advance past the remaining three changes so Current is the last one.
            enumerator.MoveNext();
            enumerator.MoveNext();
            enumerator.MoveNext();
        }

        return enumerator.Current.ResumeToken;
    }
}
// Test helper: invokes the private CreateAggregateOperation(BsonDocument) instance
// method of ChangeStreamOperation<BsonDocument> via reflection.
public static AggregateOperation<RawBsonDocument> CreateAggregateOperation(
    this ChangeStreamOperation<BsonDocument> subject,
    BsonDocument resumeAfter)
{
    var method = typeof(ChangeStreamOperation<BsonDocument>)
        .GetMethod("CreateAggregateOperation", BindingFlags.NonPublic | BindingFlags.Instance);
    var operation = method.Invoke(subject, new object[] { resumeAfter });
    return (AggregateOperation<RawBsonDocument>)operation;
}
// Verifies that CreateAggregateOperation copies every configured option onto the
// resulting AggregateOperation and prepends the expected $changeStream stage to the
// user-supplied pipeline.
// NOTE(review): parameter "resumeAferJson" has a typo ("Afer" -> "After"); left as-is
// because renaming a parameter changes the method's signature for named-argument callers.
public void CreateAggregateOperation_should_return_expected_result(
    int? batchSize,
    string locale,
    ChangeStreamFullDocumentOption fullDocument,
    int? maxAwaitTimeMS,
    ReadConcernLevel level,
    string resumeAferJson,
    string startAfterJson,
    string expectedChangeStreamStageJson)
{
    // Materialize option values from their primitive test-case representations
    // (null meaning "not configured").
    var collation = locale == null ? null : new Collation(locale);
    var maxAwaitTime = maxAwaitTimeMS == null ? (TimeSpan?)null : TimeSpan.FromMilliseconds(maxAwaitTimeMS.Value);
    var readConcern = new ReadConcern(level);
    var resumeAfter = resumeAferJson == null ? null : BsonDocument.Parse(resumeAferJson);
    var startAfter = startAfterJson == null ? null : BsonDocument.Parse(startAfterJson);
    var expectedChangeStreamStage = BsonDocument.Parse(expectedChangeStreamStageJson);
    var collectionNamespace = new CollectionNamespace(new DatabaseNamespace("foo"), "bar");
    var pipeline = new List<BsonDocument> { BsonDocument.Parse("{ $match : { operationType : \"insert\" } }") };
    var resultSerializer = BsonDocumentSerializer.Instance;
    var messageEncoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<BsonDocument>(collectionNamespace, pipeline, resultSerializer, messageEncoderSettings)
    {
        BatchSize = batchSize,
        Collation = collation,
        FullDocument = fullDocument,
        MaxAwaitTime = maxAwaitTime,
        ReadConcern = readConcern,
        ResumeAfter = resumeAfter,
        StartAfter = startAfter
    };
    // The generated $changeStream stage must come first, followed by the user stages.
    var expectedPipeline = new BsonDocument[]
    {
        expectedChangeStreamStage,
        pipeline[0]
    };

    var result = subject.CreateAggregateOperation();

    result.AllowDiskUse.Should().NotHaveValue();
    result.BatchSize.Should().Be(batchSize);
    result.Collation.Should().Be(collation);
    result.CollectionNamespace.Should().Be(collectionNamespace);
    result.MaxAwaitTime.Should().Be(maxAwaitTime);
    result.MaxTime.Should().NotHaveValue();
    result.MessageEncoderSettings.Should().BeSameAs(messageEncoderSettings);
    result.Pipeline.Should().Equal(expectedPipeline);
    result.ReadConcern.Should().Be(readConcern);
    // The inner aggregate always deserializes raw documents; the change-stream layer
    // applies the user's result serializer afterwards.
    result.ResultSerializer.Should().Be(RawBsonDocumentSerializer.Instance);
    result.RetryRequested.Should().BeFalse();
}
// Verifies cursor.GetResumeToken() in the "batch not empty, not yet iterated" state,
// both right after the initial aggregate command and right after a subsequent getMore.
public void GetResumeToken_should_return_expected_results_when_batch_is_not_empty_and_has_not_been_iterated(
    [Values(false, true)] bool async)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet);
    var pipeline = new BsonDocument[0];
    var resultSerializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var messageEncoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, pipeline, resultSerializer, messageEncoderSettings)
    {
        BatchSize = 2
    };
    EnsureDatabaseExists();
    DropCollection();
    // GenerateResumeAfterToken inserts four documents, so the stream below starts with
    // pending changes (a non-empty first batch) and a known ResumeAfter value.
    subject.ResumeAfter = GenerateResumeAfterToken(async);

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        // State: the batch is not empty, the batch hasn't been iterated at all, and
        // only the initial aggregate command has been executed.
        // Expectation: GetResumeToken falls back to the configured ResumeAfter.
        var resumeToken = cursor.GetResumeToken();
        resumeToken.Should().Be(subject.ResumeAfter);

        enumerator.MoveNext();
        enumerator.MoveNext(); // `aggregate` passed
        enumerator.MoveNext(); // `getMore`
        resumeToken = cursor.GetResumeToken();
        // State: the batch is not empty, the current batch hasn't been iterated, and the
        // stream has moved beyond a previous batch — a getMore has just been executed.
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            // Newer servers report a postBatchResumeToken; GetResumeToken must prefer it.
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeToken.Should().Be(postBatchResumeToken);
        }
        else
        {
            // Older servers: GetResumeToken falls back to the token of the last
            // document returned by the cursor.
            var documentResumeToken = cursor._documentResumeToken();
            documentResumeToken.Should().NotBeNull();
            resumeToken.Should().Be(documentResumeToken);
        }
    }
}
// Integration test: an insert whose document spans multiple 64KB chunks must still be
// delivered as a single change event carrying the complete full document.
public void Execute_should_return_expected_results_for_large_batch(
    [Values(1, 2, 3)] int numberOfChunks,
    [Values(false, true)] bool async)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet, ClusterType.Sharded);
    EnsureDatabaseExists();
    DropCollection();
    var stages = new[] { BsonDocument.Parse("{ $match : { operationType : \"insert\" } }") };
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, stages, serializer, encoderSettings)
    {
        FullDocument = ChangeStreamFullDocumentOption.UpdateLookup
    };

    using (var cursor = ExecuteOperation(subject, async))
    {
        // Pad the document so it occupies the requested number of 64KB chunks.
        var filler = new string('x', (numberOfChunks - 1) * 65536);
        var document = new BsonDocument { { "_id", 1 }, { "filler", filler } };
        Insert(document);

        // Poll the cursor until the insert shows up (batches may arrive empty).
        ChangeStreamDocument<BsonDocument> change = null;
        while (change == null)
        {
            if (async)
            {
                cursor.MoveNextAsync().GetAwaiter().GetResult();
            }
            else
            {
                cursor.MoveNext();
            }
            change = cursor.Current.FirstOrDefault();
        }

        change.FullDocument.Should().Be(document);
        change.FullDocumentBeforeChange.Should().BeNull();
    }
}
// Verifies the effective $changeStream options computed when resuming after a resumable
// error (argument true): startAfter is never re-sent, while resumeAfter and
// startAtOperationTime are derived from the configured options, the document resume
// token, and the recorded initial operation time.
public void ChangeStreamOperation_should_have_expected_change_stream_operation_options_for_resume_process_after_resumable_error(
    string resumeAfterJson,
    string startAfterJson,
    object startAtOperationTimeValue,
    string documentResumeTokenJson,
    object initialOperationTime,
    string expectedResumeAfter,
    object expectedStartAtOperationTimeValue)
{
    var emptyPipeline = new BsonDocument[0];
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var encoderSettings = new MessageEncoderSettings();
    // Test-case values arrive as primitives; null means "not configured".
    var resumeAfter = resumeAfterJson == null ? null : BsonDocument.Parse(resumeAfterJson);
    var startAfter = startAfterJson == null ? null : BsonDocument.Parse(startAfterJson);
    var startAtOperationTime = startAtOperationTimeValue == null ? null : BsonTimestamp.Create(startAtOperationTimeValue);
    var documentResumeToken = documentResumeTokenJson == null ? null : BsonDocument.Parse(documentResumeTokenJson);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, emptyPipeline, serializer, encoderSettings)
    {
        ResumeAfter = resumeAfter,
        StartAfter = startAfter,
        StartAtOperationTime = startAtOperationTime,
        DocumentResumeToken = documentResumeToken
    };
    if (initialOperationTime != null)
    {
        // Simulate the operation time captured from the initial aggregate response.
        subject._initialOperationTime(BsonTimestamp.Create(initialOperationTime));
    }

    var stage = subject.CreateChangeStreamStage(true);

    var changeStream = stage.GetValue("$changeStream").AsBsonDocument;
    changeStream.GetValue("resumeAfter", null).Should().Be(expectedResumeAfter == null ? null : BsonDocument.Parse(expectedResumeAfter));
    // startAfter must never be present when resuming after an error.
    changeStream.TryGetValue("startAfter", out _).Should().BeFalse();
    changeStream.GetValue("startAtOperationTime", null).Should().Be(expectedStartAtOperationTimeValue == null ? null : BsonTimestamp.Create(expectedStartAtOperationTimeValue));
}
// Test helper: invokes the private parameterless CreateChangeStreamStage method via reflection.
public static BsonDocument CreateChangeStreamStage(this ChangeStreamOperation<ChangeStreamDocument<BsonDocument>> subject)
{
    var stage = Reflector.Invoke(subject, nameof(CreateChangeStreamStage));
    return (BsonDocument)stage;
}
// Test helper: invokes the private parameterless CreateAggregateOperation method via reflection.
public static AggregateOperation<RawBsonDocument> CreateAggregateOperation(this ChangeStreamOperation<BsonDocument> subject)
    => (AggregateOperation<RawBsonDocument>)Reflector.Invoke(subject, nameof(CreateAggregateOperation));
// Verifies cursor.GetResumeToken() in the two remaining states: an empty batch, and a
// batch that has been iterated through to its last document — with and without a
// configured ResumeAfter value.
public void GetResumeToken_should_return_expected_results_when_batch_is_empty_or_fully_iterated(
    [Values(false, true)] bool async,
    [Values(false, true)] bool withResumeAfter)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet);
    var pipeline = new BsonDocument[0];
    var resultSerializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var messageEncoderSettings = new MessageEncoderSettings();
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(_collectionNamespace, pipeline, resultSerializer, messageEncoderSettings)
    {
        BatchSize = 2
    };
    EnsureDatabaseExists();
    DropCollection();
    if (withResumeAfter)
    {
        // shouldBeEmpty: true returns the token of the LAST change produced by the
        // helper, so resuming from it yields an initially empty stream.
        subject.ResumeAfter = GenerateResumeAfterToken(async, true);
    }

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        // State 1: the batch is empty.
        var resumeResult = cursor.GetResumeToken();
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            // Newer servers: the postBatchResumeToken wins even for an empty batch.
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(postBatchResumeToken);
        }
        else
        {
            if (withResumeAfter)
            {
                // Older servers: fall back to the configured ResumeAfter.
                resumeResult.Should().Be(subject.ResumeAfter);
            }
            else
            {
                // Nothing configured and nothing iterated: no token is available.
                resumeResult.Should().BeNull();
            }
        }

        // State 2: the batch has been iterated to its last document.
        Insert("{ a : 1 }");
        enumerator.MoveNext();
        resumeResult = cursor.GetResumeToken();
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(postBatchResumeToken);
        }
        else
        {
            // Older servers: use the resume token of the last iterated document.
            var documentResumeToken = cursor._documentResumeToken();
            documentResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(documentResumeToken);
        }
    }
}
// Test helper: sets the private _initialOperationTime field via reflection.
public static void _initialOperationTime(this ChangeStreamOperation<ChangeStreamDocument<BsonDocument>> subject, BsonTimestamp value)
    => Reflector.SetFieldValue(subject, nameof(_initialOperationTime), value);