public void Execute_should_return_expected_results_for_inserts(
    [Values(false, true)] bool async)
{
    RequireServer.Check().Supports(Feature.ChangeStreamStage).ClusterTypes(ClusterType.ReplicaSet, ClusterType.Sharded);

    // Watch only insert events on the test collection.
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace,
        new[] { BsonDocument.Parse("{ $match : { operationType : \"insert\" } }") },
        serializer,
        new MessageEncoderSettings());

    DropCollection();
    Insert("{ _id : 1, x : 1 }"); // happens before the stream opens, so it is not observed

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Update("{ _id : 1 }", "{ $set : { x : 2 } }"); // filtered out by the $match stage
        Insert("{ _id : 2, x : 2 }");

        enumerator.MoveNext().Should().BeTrue();

        // The first (and only) visible change must be the insert of _id 2.
        var change = enumerator.Current;
        change.OperationType.Should().Be(ChangeStreamOperationType.Insert);
        change.CollectionNamespace.Should().Be(_collectionNamespace);
        change.DocumentKey.Should().Be("{ _id : 2 }");
        change.FullDocument.Should().Be("{ _id : 2, x : 2 }");
        change.ResumeToken.Should().NotBeNull();
        change.UpdateDescription.Should().BeNull();
    }
}
public void Execute_should_return_expected_results_for_updates(
    [Values(ChangeStreamFullDocumentOption.Default, ChangeStreamFullDocumentOption.UpdateLookup)] ChangeStreamFullDocumentOption fullDocument,
    [Values(false, true)] bool async)
{
    RequireServer.Check().Supports(Feature.ChangeStreamStage).ClusterTypes(ClusterType.ReplicaSet);

    // Watch only update events, optionally asking the server to look up the current document.
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace,
        new[] { BsonDocument.Parse("{ $match : { operationType : \"update\" } }") },
        serializer,
        new MessageEncoderSettings())
    {
        FullDocument = fullDocument
    };

    EnsureDatabaseExists();
    DropCollection();

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Insert("{ _id : 1, x : 1 }"); // filtered out by the $match stage
        Update("{ _id : 1 }", "{ $set : { x : 2 } }");

        enumerator.MoveNext().Should().BeTrue();

        var change = enumerator.Current;
        change.OperationType.Should().Be(ChangeStreamOperationType.Update);
        change.CollectionNamespace.Should().Be(_collectionNamespace);
        change.DocumentKey.Should().Be("{ _id : 1 }");
        // FullDocument is only populated when the lookup option was requested.
        change.FullDocument.Should().Be(fullDocument == ChangeStreamFullDocumentOption.Default ? null : "{ _id : 1, x : 2 }");
        change.RenameTo.Should().BeNull();
        change.ResumeToken.Should().NotBeNull();
        change.UpdateDescription.RemovedFields.Should().BeEmpty();
        change.UpdateDescription.UpdatedFields.Should().Be("{ x : 2 }");
    }
}
// Opens a change stream, performs four inserts, and returns a resume token.
// With shouldBeEmpty = true the stream is iterated to its last change, so a
// stream resumed from the returned token starts out empty.
private BsonDocument GenerateResumeAfterToken(bool async, bool shouldBeEmpty = false)
{
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var operation = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace, new BsonDocument[0], serializer, new MessageEncoderSettings())
    {
        BatchSize = 2
    };

    using (var cursor = ExecuteOperation(operation, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        Insert("{ a : 1 }");
        Insert("{ b : 2 }");
        Insert("{ c : 2 }");
        Insert("{ d : 4 }");

        enumerator.MoveNext();
        if (shouldBeEmpty)
        {
            // Advance to the fourth (final) change so the token exhausts the stream.
            enumerator.MoveNext();
            enumerator.MoveNext();
            enumerator.MoveNext();
        }

        return enumerator.Current.ResumeToken;
    }
}
// private methods

// Renders a typed pipeline definition against the change-stream document serializer.
private RenderedPipelineDefinition<ChangeStreamDocument<BsonDocument>> RenderPipeline(
    PipelineDefinition<ChangeStreamDocument<BsonDocument>, ChangeStreamDocument<BsonDocument>> pipeline)
{
    var inputSerializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    return pipeline.Render(inputSerializer, BsonSerializer.SerializerRegistry);
}
public void ChangeStreamOperation_should_not_calculate_effective_options_for_non_resume_process()
{
    var resumeAfter = new BsonDocument("a", 1);
    var startAfter = new BsonDocument("b", 2);
    var startAtOperationTime = BsonTimestamp.Create(3L);

    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace,
        new BsonDocument[0],
        new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance),
        new MessageEncoderSettings())
    {
        ResumeAfter = resumeAfter,
        StartAfter = startAfter,
        StartAtOperationTime = startAtOperationTime,
        DocumentResumeToken = new BsonDocument("c", 3)
    };

    var result = subject.CreateChangeStreamStage(false); // false => not resuming

    // Outside of a resume the configured options must be passed through unmodified.
    var changeStream = result.GetValue("$changeStream").AsBsonDocument;
    changeStream.GetValue("resumeAfter").Should().Be(resumeAfter);
    changeStream.GetValue("startAfter").Should().Be(startAfter);
    changeStream.GetValue("startAtOperationTime").Should().Be(startAtOperationTime);
}
// Deserializes a JSON string into a ChangeStreamDocument using the serializer under test.
private ChangeStreamDocument<BsonDocument> ConvertJsonToChangeStreamBsonDocument(string json)
{
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    using (var reader = new MongoDB.Bson.IO.JsonReader(json))
    {
        return serializer.Deserialize(BsonDeserializationContext.CreateRoot(reader));
    }
}
public void GetResumeToken_should_return_expected_results_when_batch_is_not_empty_and_has_not_been_iterated(
    [Values(false, true)] bool async)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet);

    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace, new BsonDocument[0], serializer, new MessageEncoderSettings())
    {
        BatchSize = 2
    };
    EnsureDatabaseExists();
    DropCollection();
    subject.ResumeAfter = GenerateResumeAfterToken(async);

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        // Only the initial aggregate has run and nothing has been iterated,
        // so the token falls back to the configured resumeAfter value.
        var resumeToken = cursor.GetResumeToken();
        resumeToken.Should().Be(subject.ResumeAfter);

        enumerator.MoveNext();
        enumerator.MoveNext(); // consumes the rest of the aggregate batch
        enumerator.MoveNext(); // forces a getMore
        resumeToken = cursor.GetResumeToken();

        // After a getMore the token comes from the post-batch resume token when the
        // server supports it, otherwise from the most recently iterated document.
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeToken.Should().Be(postBatchResumeToken);
        }
        else
        {
            var documentResumeToken = cursor._documentResumeToken();
            documentResumeToken.Should().NotBeNull();
            resumeToken.Should().Be(documentResumeToken);
        }
    }
}
// Serializes a ChangeStreamDocument back to its JSON representation.
private static string ConvertChangeStreamBsonDocumentToJson(ChangeStreamDocument<BsonDocument> changeStreamDocument)
{
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    using (var textWriter = new StringWriter())
    using (var writer = new MongoDB.Bson.IO.JsonWriter(textWriter))
    {
        serializer.Serialize(BsonSerializationContext.CreateRoot(writer), changeStreamDocument);
        return textWriter.ToString();
    }
}
public void Execute_should_return_expected_results_for_large_batch(
    [Values(1, 2, 3)] int numberOfChunks,
    [Values(false, true)] bool async)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet, ClusterType.Sharded);
    EnsureDatabaseExists();
    DropCollection();

    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace,
        new[] { BsonDocument.Parse("{ $match : { operationType : \"insert\" } }") },
        serializer,
        new MessageEncoderSettings())
    {
        FullDocument = ChangeStreamFullDocumentOption.UpdateLookup
    };

    using (var cursor = ExecuteOperation(subject, async))
    {
        // Pad the document so the change event spans the requested number of 64KB chunks.
        var padding = new string('x', (numberOfChunks - 1) * 65536);
        var document = new BsonDocument { { "_id", 1 }, { "filler", padding } };
        Insert(document);

        // Keep advancing the cursor until the insert event arrives.
        ChangeStreamDocument<BsonDocument> changeStreamDocument = null;
        while (changeStreamDocument == null)
        {
            if (async)
            {
                cursor.MoveNextAsync().GetAwaiter().GetResult();
            }
            else
            {
                cursor.MoveNext();
            }
            changeStreamDocument = cursor.Current.FirstOrDefault();
        }

        changeStreamDocument.FullDocument.Should().Be(document);
        changeStreamDocument.FullDocumentBeforeChange.Should().BeNull();
    }
}
public void ChangeStreamOperation_should_have_expected_change_stream_operation_options_for_resume_process_after_resumable_error(
    string resumeAfterJson,
    string startAfterJson,
    object startAtOperationTimeValue,
    string documentResumeTokenJson,
    object initialOperationTime,
    string expectedResumeAfter,
    object expectedStartAtOperationTimeValue)
{
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace, new BsonDocument[0], serializer, new MessageEncoderSettings())
    {
        ResumeAfter = resumeAfterJson == null ? null : BsonDocument.Parse(resumeAfterJson),
        StartAfter = startAfterJson == null ? null : BsonDocument.Parse(startAfterJson),
        StartAtOperationTime = startAtOperationTimeValue == null ? null : BsonTimestamp.Create(startAtOperationTimeValue),
        DocumentResumeToken = documentResumeTokenJson == null ? null : BsonDocument.Parse(documentResumeTokenJson)
    };

    if (initialOperationTime != null)
    {
        subject._initialOperationTime(BsonTimestamp.Create(initialOperationTime));
    }

    var result = subject.CreateChangeStreamStage(true); // true => resuming

    // When resuming, startAfter must never be sent; resumeAfter and
    // startAtOperationTime are recomputed from the effective options.
    var changeStream = result.GetValue("$changeStream").AsBsonDocument;
    changeStream.GetValue("resumeAfter", null).Should().Be(expectedResumeAfter == null ? null : BsonDocument.Parse(expectedResumeAfter));
    changeStream.TryGetValue("startAfter", out _).Should().BeFalse();
    changeStream.GetValue("startAtOperationTime", null).Should().Be(expectedStartAtOperationTimeValue == null ? null : BsonTimestamp.Create(expectedStartAtOperationTimeValue));
}
public void GetResumeToken_should_return_expected_results_when_batch_is_empty_or_fully_iterated(
    [Values(false, true)] bool async,
    [Values(false, true)] bool withResumeAfter)
{
    RequireServer.Check().ClusterTypes(ClusterType.ReplicaSet);

    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    var subject = new ChangeStreamOperation<ChangeStreamDocument<BsonDocument>>(
        _collectionNamespace, new BsonDocument[0], serializer, new MessageEncoderSettings())
    {
        BatchSize = 2
    };
    EnsureDatabaseExists();
    DropCollection();
    if (withResumeAfter)
    {
        subject.ResumeAfter = GenerateResumeAfterToken(async, true);
    }

    using (var cursor = ExecuteOperation(subject, async))
    using (var enumerator = new AsyncCursorEnumerator<ChangeStreamDocument<BsonDocument>>(cursor, CancellationToken.None))
    {
        // Case 1: the initial batch is empty.
        var resumeResult = cursor.GetResumeToken();
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(postBatchResumeToken);
        }
        else if (withResumeAfter)
        {
            resumeResult.Should().Be(subject.ResumeAfter);
        }
        else
        {
            resumeResult.Should().BeNull();
        }

        // Case 2: the batch has been iterated through its last document.
        Insert("{ a : 1 }");
        enumerator.MoveNext();
        resumeResult = cursor.GetResumeToken();
        if (Feature.ChangeStreamPostBatchResumeToken.IsSupported(CoreTestConfiguration.MaxWireVersion))
        {
            var postBatchResumeToken = cursor._postBatchResumeToken();
            postBatchResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(postBatchResumeToken);
        }
        else
        {
            var documentResumeToken = cursor._documentResumeToken();
            documentResumeToken.Should().NotBeNull();
            resumeResult.Should().Be(documentResumeToken);
        }
    }
}