public void WriteMessage_should_split_batches_when_maxBatchCount_is_reached(int numberOfDocuments, int maxBatchCount, int expectedNumberOfBatches)
{
    var documents = new List<BsonDocument>(numberOfDocuments);
    for (var i = 0; i < numberOfDocuments; i++)
    {
        documents.Add(new BsonDocument("_id", i));
    }

    using (var enumerator = documents.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, documentSource, maxBatchCount, __maxMessageSize, __continueOnError);

        var numberOfBatches = 0;
        var batchedDocuments = new List<BsonDocument>();
        while (documentSource.HasMore)
        {
            using (var stream = new MemoryStream())
            {
                var subject = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
                subject.WriteMessage(message);
            }
            numberOfBatches++;
            batchedDocuments.AddRange(documentSource.Batch);
            documentSource.ClearBatch();
        }

        numberOfBatches.Should().Be(expectedNumberOfBatches);
        batchedDocuments.Should().Equal(documents);
    }
}
private void OnInsertMessage(InsertMessage msg)
{
    var ie = msg.InsertElement;
    ie.Parent = this;
    Children.Add(ie);
}
private InsertFlags BuildInsertFlags(InsertMessage<TDocument> message)
{
    var flags = InsertFlags.None;
    if (message.ContinueOnError)
    {
        flags |= InsertFlags.ContinueOnError;
    }
    return flags;
}
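// A minimal sketch of the flags enum that BuildInsertFlags above assumes. The member
// names are taken from the snippet itself; the bit value matches the OP_INSERT wire
// protocol, where bit 0 of the flags field is continueOnError (see also the flags byte
// of 1 in the binary encoder test bytes below).
[Flags]
internal enum InsertFlags
{
    None = 0,
    ContinueOnError = 1 // bit 0 of the OP_INSERT flags int32
}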
private void OnInsertMessage(InsertMessage msg)
{
    if (PanelViewModel != null)
    {
        var builder = new MeshBuilder();
        builder.AddCylinder(msg.Position, msg.Position + msg.Direction * msg.Length, msg.Diameter / 2.0);
        PanelViewModel.AppendMeshGeometry3D(builder.ToMesh(), msg.Color);
    }
}
public void Constructor_should_initialize_instance()
{
    var subject = new InsertMessage<BsonDocument>(_requestId, _collectionNamespace, _serializer, _documentSource, _maxBatchCount, _maxMessageSize, _continueOnError);

    subject.CollectionNamespace.Should().Be(_collectionNamespace);
    subject.ContinueOnError.Should().Be(_continueOnError);
    subject.DocumentSource.Should().BeSameAs(_documentSource);
    subject.MaxBatchCount.Should().Be(_maxBatchCount);
    subject.MaxMessageSize.Should().Be(_maxMessageSize);
    subject.RequestId.Should().Be(_requestId);
    subject.Serializer.Should().BeSameAs(_serializer);
}
public void WriteMessage_should_encode_flags_correctly(int flags, bool continueOnError)
{
    var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, __documentSource, __maxBatchCount, __maxMessageSize, continueOnError);

    using (var stream = new MemoryStream())
    {
        var subject = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
        subject.WriteMessage(message);
        var bytes = stream.ToArray();
        bytes[__flagsOffset].Should().Be((byte)flags);
    }
}
static void SendInsertMessage(NetworkStream stream, byte[] proposition, Guid transactionKey)
{
    MessageHeader header = new MessageHeader() { MessageType = MessageType.Insert };
    header.SendToStream(stream);

    var ins = new InsertMessage();
    ins.TransactionId = transactionKey;
    ins.Data = proposition;
    ins.SendToStream(stream);
}
// static constructor
static InsertMessageJsonEncoderTests()
{
    __testMessage = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, __documentSource, __maxBatchCount, __maxMessageSize, __continueOnError);

    __testMessageJson =
        "{ " +
            "\"opcode\" : \"insert\", " +
            "\"requestId\" : 1, " +
            "\"database\" : \"d\", " +
            "\"collection\" : \"c\", " +
            "\"maxBatchCount\" : 1000, " +
            "\"maxMessageSize\" : 40000000, " +
            "\"continueOnError\" : true, " +
            "\"documents\" : [{ \"_id\" : 1 }, { \"_id\" : 2 }]" +
        " }";
}
/// <summary>
/// Inserts documents.
/// </summary>
/// <param name="documentsToInsert">
/// The documents to insert.
/// </param>
/// <exception cref="NotSupportedException">
/// </exception>
public void Insert(IEnumerable<T> documentsToInsert)
{
    AssertUpdatable();
    TrySettingId(documentsToInsert);

    var insertMessage = new InsertMessage<T>(_connection, FullyQualifiedName, documentsToInsert);
    insertMessage.Execute();

    if (_connection.StrictMode)
    {
        var error = _db.LastError(_connection.VerifyWriteCount);
        if (error.Code > 0)
        {
            throw new MongoException(error.Error);
        }
    }
}
/// <summary>
/// Writes the message.
/// </summary>
/// <param name="message">The message.</param>
public void WriteMessage(InsertMessage<TDocument> message)
{
    Ensure.IsNotNull(message, nameof(message));

    var writer = CreateBinaryWriter();
    var stream = writer.BsonStream;
    var messageStartPosition = (int)stream.Position;

    stream.WriteInt32(0); // messageSize
    stream.WriteInt32(message.RequestId);
    stream.WriteInt32(0); // responseTo
    stream.WriteInt32((int)Opcode.Insert);
    stream.WriteInt32((int)BuildInsertFlags(message));
    stream.WriteCString(message.CollectionNamespace.FullName);
    WriteDocuments(writer, messageStartPosition, message);
    stream.BackpatchSize(messageStartPosition);
}
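// A minimal usage sketch for the encoder above, mirroring the setup used in the test
// snippets in this collection; `message` and `__messageEncoderSettings` are assumed to
// be constructed as in those tests.
using (var stream = new MemoryStream())
{
    var encoder = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, BsonDocumentSerializer.Instance);
    encoder.WriteMessage(message);    // writes header, flags, namespace, then the documents
    var wireBytes = stream.ToArray(); // the raw OP_INSERT wire-protocol bytes
}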
// static constructor
static InsertMessageBinaryEncoderTests()
{
    __testMessage = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, __documentSource, __maxBatchCount, __maxMessageSize, __continueOnError);

    __testMessageBytes = new byte[]
    {
        0, 0, 0, 0, // messageLength
        1, 0, 0, 0, // requestId
        0, 0, 0, 0, // responseTo
        210, 7, 0, 0, // opcode = 2002
        1, 0, 0, 0, // flags
        (byte)'d', (byte)'.', (byte)'c', 0, // fullCollectionName
        14, 0, 0, 0, 0x10, (byte)'_', (byte)'i', (byte)'d', 0, 1, 0, 0, 0, 0, // documents[0]
        14, 0, 0, 0, 0x10, (byte)'_', (byte)'i', (byte)'d', 0, 2, 0, 0, 0, 0 // documents[1]
    };
    __testMessageBytes[0] = (byte)__testMessageBytes.Length;
    __flagsOffset = 16;
}
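// Note: __flagsOffset is 16 because the standard message header is four little-endian
// int32s (messageLength, requestId, responseTo, opcode), i.e. 16 bytes, and the
// OP_INSERT flags field comes immediately after the header.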
public void WriteMessage(InsertMessage <TDocument> message) { Ensure.IsNotNull(message, "message"); var binaryWriter = CreateBinaryWriter(); var streamWriter = binaryWriter.StreamWriter; var messageStartPosition = (int)streamWriter.Position; var state = new State { BinaryWriter = binaryWriter, Message = message, MessageStartPosition = messageStartPosition }; streamWriter.WriteInt32(0); // messageSize streamWriter.WriteInt32(message.RequestId); streamWriter.WriteInt32(0); // responseTo streamWriter.WriteInt32((int)Opcode.Insert); streamWriter.WriteInt32((int)BuildInsertFlags(message)); streamWriter.WriteCString(message.CollectionNamespace.FullName); WriteDocuments(state); streamWriter.BackpatchSize(messageStartPosition); }
public void WriteMessage(InsertMessage<TDocument> message)
{
    Ensure.IsNotNull(message, "message");
    if (_jsonWriter == null)
    {
        throw new InvalidOperationException("No jsonWriter was provided.");
    }

    BsonValue documents;
    if (message.DocumentSource.Batch == null)
    {
        documents = BsonNull.Value;
    }
    else
    {
        var array = new BsonArray();
        foreach (var document in message.DocumentSource.Batch)
        {
            var wrappedDocument = new BsonDocumentWrapper(document, _serializer);
            array.Add(wrappedDocument);
        }
        documents = array;
    }

    var messageDocument = new BsonDocument
    {
        { "opcode", "insert" },
        { "requestId", message.RequestId },
        { "database", message.DatabaseName },
        { "collection", message.CollectionName },
        { "maxBatchCount", message.MaxBatchCount },
        { "maxMessageSize", message.MaxMessageSize },
        { "continueOnError", message.ContinueOnError },
        { "documents", documents }
    };

    var messageContext = BsonSerializationContext.CreateRoot<BsonDocument>(_jsonWriter);
    BsonDocumentSerializer.Instance.Serialize(messageContext, messageDocument);
}
/// <summary>
/// Writes the message.
/// </summary>
/// <param name="message">The message.</param>
public void WriteMessage(InsertMessage<TDocument> message)
{
    Ensure.IsNotNull(message, nameof(message));

    var messageDocument = new BsonDocument
    {
        { "opcode", "insert" },
        { "requestId", message.RequestId },
        { "database", message.CollectionNamespace.DatabaseNamespace.DatabaseName },
        { "collection", message.CollectionNamespace.CollectionName },
        { "maxBatchCount", message.MaxBatchCount },
        { "maxMessageSize", message.MaxMessageSize },
        { "continueOnError", message.ContinueOnError },
        { "documents", WrapDocuments(message) }
    };

    var jsonWriter = CreateJsonWriter();
    var messageContext = BsonSerializationContext.CreateRoot(jsonWriter);
    BsonDocumentSerializer.Instance.Serialize(messageContext, messageDocument);
}
public void WriteMessage_should_split_batches_when_maxMessageSize_is_reached(int numberOfDocuments, int maxMessageSizeMultiple, int maxMessageSizeDelta, int expectedNumberOfBatches)
{
    var documents = new List<BsonDocument>(numberOfDocuments);
    for (var i = 0; i < numberOfDocuments; i++)
    {
        documents.Add(new BsonDocument("_id", i));
    }
    var documentSize = documents[0].ToBson().Length;
    var messageSizeWithZeroDocuments = __testMessageBytes.Length - 2 * documentSize;
    var maxMessageSize = messageSizeWithZeroDocuments + (maxMessageSizeMultiple * documentSize) + maxMessageSizeDelta;

    using (var enumerator = documents.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __databaseName, __collectionName, __serializer, documentSource, __maxBatchCount, maxMessageSize, __continueOnError);

        var numberOfBatches = 0;
        var batchedDocuments = new List<BsonDocument>();
        while (documentSource.HasMore)
        {
            using (var stream = new MemoryStream())
            using (var binaryWriter = new BsonBinaryWriter(stream))
            {
                var subject = new InsertMessageBinaryEncoder<BsonDocument>(null, binaryWriter, __serializer);
                subject.WriteMessage(message);
            }
            numberOfBatches++;
            batchedDocuments.AddRange(documentSource.Batch);
            documentSource.ClearBatch();
        }

        numberOfBatches.Should().Be(expectedNumberOfBatches);
        batchedDocuments.Should().Equal(documents);
    }
}
// private methods
private BsonArray WrapDocuments(InsertMessage<TDocument> message)
{
    var documentSource = message.DocumentSource;
    var batchCount = Math.Min(documentSource.Count, message.MaxBatchCount);
    if (batchCount < documentSource.Count && !documentSource.CanBeSplit)
    {
        throw new BsonSerializationException("Batch is too large.");
    }

    var wrappedDocuments = new BsonArray(batchCount);
    for (var i = 0; i < batchCount; i++)
    {
        var document = documentSource.Items[documentSource.Offset + i];
        var wrappedDocument = new BsonDocumentWrapper(document, _serializer);
        wrappedDocuments.Add(wrappedDocument);
    }

    documentSource.SetProcessedCount(batchCount);
    return wrappedDocuments;
}
/// <summary>
/// Inserts the specified documents.
/// </summary>
/// <param name="documents">The documents.</param>
public void Insert<TElement>(IEnumerable<TElement> documents)
{
    if (documents is Document)
    {
        Insert(new[] { (Document)documents });
        return;
    }

    var rootType = typeof(T);
    var writerSettings = _configuration.SerializationFactory.GetBsonWriterSettings(rootType);
    var insertMessage = new InsertMessage(writerSettings) { FullCollectionName = FullName };

    var descriptor = _configuration.SerializationFactory.GetObjectDescriptor(rootType);
    var insertDocument = new List<object>();
    foreach (var document in documents)
    {
        var id = descriptor.GetPropertyValue(document, "_id");
        if (id == null)
        {
            descriptor.SetPropertyValue(document, "_id", descriptor.GenerateId(document));
        }
        insertDocument.Add(document);
    }
    insertMessage.Documents = insertDocument.ToArray();

    try
    {
        _connection.SendMessage(insertMessage, DatabaseName);
    }
    catch (IOException exception)
    {
        throw new MongoConnectionException("Could not insert document, communication failure", _connection, exception);
    }
}
public void WriteMessage(InsertMessage<TDocument> message)
{
    Ensure.IsNotNull(message, "message");
    if (_binaryWriter == null)
    {
        throw new InvalidOperationException("No binaryWriter was provided.");
    }

    var streamWriter = _binaryWriter.StreamWriter;
    var messageStartPosition = (int)streamWriter.Position;
    var state = new State { Message = message, MessageStartPosition = messageStartPosition };

    streamWriter.WriteInt32(0); // messageSize
    streamWriter.WriteInt32(message.RequestId);
    streamWriter.WriteInt32(0); // responseTo
    streamWriter.WriteInt32((int)Opcode.Insert);
    streamWriter.WriteInt32((int)BuildInsertFlags(message));
    streamWriter.WriteCString(message.DatabaseName + "." + message.CollectionName);
    WriteDocuments(state);
    streamWriter.BackpatchSize(messageStartPosition);
}
public void Insert(IEnumerable<Document> docs)
{
    InsertMessage im = new InsertMessage();
    im.FullCollectionName = this.FullName;

    List<Document> idocs = new List<Document>();
    foreach (Document doc in docs)
    {
        if (doc.Contains("_id") == false)
        {
            Oid _id = oidGenerator.Generate();
            doc.Prepend("_id", _id);
        }
    }
    idocs.AddRange(docs);
    im.Documents = idocs.ToArray();

    try
    {
        this.connection.SendMessage(im);
    }
    catch (IOException ioe)
    {
        throw new MongoCommException("Could not insert document, communication failure", this.connection, ioe);
    }
}
public void WriteMessage_should_split_batches_when_maxMessageSize_is_reached(int numberOfDocuments, int maxMessageSizeMultiple, int maxMessageSizeDelta, int expectedNumberOfBatches)
{
    var documents = new List<BsonDocument>(numberOfDocuments);
    for (var i = 0; i < numberOfDocuments; i++)
    {
        documents.Add(new BsonDocument("_id", i));
    }
    var documentSize = documents[0].ToBson().Length;
    var messageSizeWithZeroDocuments = __testMessageBytes.Length - 2 * documentSize;
    var maxMessageSize = messageSizeWithZeroDocuments + (maxMessageSizeMultiple * documentSize) + maxMessageSizeDelta;

    var documentSource = new BatchableSource<BsonDocument>(documents, canBeSplit: true);
    var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, documentSource, __maxBatchCount, maxMessageSize, __continueOnError);

    var numberOfBatches = 0;
    var batchedDocuments = new List<BsonDocument>();
    while (documentSource.Count > 0)
    {
        using (var stream = new MemoryStream())
        {
            var subject = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
            subject.WriteMessage(message);
        }
        numberOfBatches++;
        batchedDocuments.AddRange(documentSource.GetProcessedItems());
        documentSource.AdvancePastProcessedItems();
    }

    numberOfBatches.Should().Be(expectedNumberOfBatches);
    batchedDocuments.Should().Equal(documents);
}
public void GetEncoder_should_return_encoder()
{
    var mockEncoder = Substitute.For<IMessageEncoder<InsertMessage<BsonDocument>>>();
    var mockEncoderFactory = Substitute.For<IMessageEncoderFactory>();
    mockEncoderFactory.GetInsertMessageEncoder(_serializer).Returns(mockEncoder);

    var subject = new InsertMessage<BsonDocument>(_requestId, _databaseName, _collectionName, _serializer, _documentSource, _maxBatchCount, _maxMessageSize, _continueOnError);
    var encoder = subject.GetEncoder(mockEncoderFactory);

    encoder.Should().BeSameAs(mockEncoder);
}
private void WriteDocuments(BsonBinaryWriter writer, long messageStartPosition, InsertMessage<TDocument> message)
{
    var stream = writer.BsonStream;
    var context = BsonSerializationContext.CreateRoot(writer);

    var documentSource = message.DocumentSource;
    var batchCount = Math.Min(documentSource.Count, message.MaxBatchCount);
    if (batchCount < documentSource.Count && !documentSource.CanBeSplit)
    {
        throw new BsonSerializationException("Batch is too large.");
    }

    for (var i = 0; i < batchCount; i++)
    {
        var document = documentSource.Items[documentSource.Offset + i];
        var documentStartPosition = stream.Position;
        _serializer.Serialize(context, document);

        var messageSize = stream.Position - messageStartPosition;
        if (messageSize > message.MaxMessageSize)
        {
            if (i > 0 && documentSource.CanBeSplit)
            {
                stream.Position = documentStartPosition;
                stream.SetLength(documentStartPosition);
                documentSource.SetProcessedCount(i);
                return;
            }
            else
            {
                throw new BsonSerializationException("Batch is too large.");
            }
        }
    }

    documentSource.SetProcessedCount(batchCount);
}
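// Note on the splitting logic above: each document is serialized optimistically, and only
// then is the running message size checked. On overflow the stream is rolled back to the
// start of the offending document (both Position and SetLength are reset so no partial
// BSON is left behind), SetProcessedCount(i) cuts the batch at i documents, and the
// overflowing document becomes the first item of the next batch. A batch fails outright
// only when its very first document already exceeds MaxMessageSize or the source cannot
// be split.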
private void WriteDocuments(BsonBinaryWriter writer, long messageStartPosition, InsertMessage<TDocument> message)
{
    var stream = writer.BsonStream;
    var context = BsonSerializationContext.CreateRoot(writer);

    var collectionNamespace = message.CollectionNamespace;
    var isSystemIndexesCollection = collectionNamespace.Equals(collectionNamespace.DatabaseNamespace.SystemIndexesCollection);
    var elementNameValidator = isSystemIndexesCollection ? (IElementNameValidator)NoOpElementNameValidator.Instance : CollectionElementNameValidator.Instance;

    writer.PushElementNameValidator(elementNameValidator);
    try
    {
        var documentSource = message.DocumentSource;
        var batchCount = Math.Min(documentSource.Count, message.MaxBatchCount);
        if (batchCount < documentSource.Count && !documentSource.CanBeSplit)
        {
            throw new BsonSerializationException("Batch is too large.");
        }

        for (var i = 0; i < batchCount; i++)
        {
            var document = documentSource.Items[documentSource.Offset + i];
            var documentStartPosition = stream.Position;
            _serializer.Serialize(context, document);

            var messageSize = stream.Position - messageStartPosition;
            if (messageSize > message.MaxMessageSize)
            {
                if (i > 0 && documentSource.CanBeSplit)
                {
                    stream.Position = documentStartPosition;
                    stream.SetLength(documentStartPosition);
                    documentSource.SetProcessedCount(i);
                    return;
                }
                else
                {
                    throw new BsonSerializationException("Batch is too large.");
                }
            }
        }

        documentSource.SetProcessedCount(batchCount);
    }
    finally
    {
        writer.PopElementNameValidator();
    }
}
public void WriteMessage_should_split_batches_when_maxMessageSize_is_reached(int numberOfDocuments, int maxMessageSizeMultiple, int maxMessageSizeDelta, int expectedNumberOfBatches)
{
    var documents = new List<BsonDocument>(numberOfDocuments);
    for (var i = 0; i < numberOfDocuments; i++)
    {
        documents.Add(new BsonDocument("_id", i));
    }
    var documentSize = documents[0].ToBson().Length;
    var messageSizeWithZeroDocuments = __testMessageBytes.Length - 2 * documentSize;
    var maxMessageSize = messageSizeWithZeroDocuments + (maxMessageSizeMultiple * documentSize) + maxMessageSizeDelta;

    using (var enumerator = documents.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __collectionNamespace, __serializer, documentSource, __maxBatchCount, maxMessageSize, __continueOnError);

        var numberOfBatches = 0;
        var batchedDocuments = new List<BsonDocument>();
        while (documentSource.HasMore)
        {
            using (var stream = new MemoryStream())
            {
                var subject = new InsertMessageBinaryEncoder<BsonDocument>(stream, __messageEncoderSettings, __serializer);
                subject.WriteMessage(message);
            }
            numberOfBatches++;
            batchedDocuments.AddRange(documentSource.Batch);
            documentSource.ClearBatch();
        }

        numberOfBatches.Should().Be(expectedNumberOfBatches);
        batchedDocuments.Should().Equal(documents);
    }
}
public void GetEncoder_should_return_encoder()
{
    var subject = new InsertMessage<BsonDocument>(_requestId, _collectionNamespace, _serializer, _documentSource, _maxBatchCount, _maxMessageSize, _continueOnError);
    var stubEncoderFactory = Substitute.For<IMessageEncoderFactory>();
    var stubEncoder = Substitute.For<IMessageEncoder>();
    stubEncoderFactory.GetInsertMessageEncoder(_serializer).Returns(stubEncoder);

    var result = subject.GetEncoder(stubEncoderFactory);

    result.Should().BeSameAs(stubEncoder);
}
public void WriteMessage_should_split_batches_when_maxBatchCount_is_reached(int numberOfDocuments, int maxBatchCount, int expectedNumberOfBatches)
{
    var documents = new List<BsonDocument>(numberOfDocuments);
    for (var i = 0; i < numberOfDocuments; i++)
    {
        documents.Add(new BsonDocument("_id", i));
    }

    using (var enumerator = documents.GetEnumerator())
    {
        var documentSource = new BatchableSource<BsonDocument>(enumerator);
        var message = new InsertMessage<BsonDocument>(__requestId, __databaseName, __collectionName, __serializer, documentSource, maxBatchCount, __maxMessageSize, __continueOnError);

        var numberOfBatches = 0;
        var batchedDocuments = new List<BsonDocument>();
        while (documentSource.HasMore)
        {
            using (var stream = new MemoryStream())
            using (var binaryWriter = new BsonBinaryWriter(stream))
            {
                var subject = new InsertMessageBinaryEncoder<BsonDocument>(null, binaryWriter, __serializer);
                subject.WriteMessage(message);
            }
            numberOfBatches++;
            batchedDocuments.AddRange(documentSource.Batch);
            documentSource.ClearBatch();
        }

        numberOfBatches.Should().Be(expectedNumberOfBatches);
        batchedDocuments.Should().Equal(documents);
    }
}
public void GetEncoder_should_return_encoder()
{
    var subject = new InsertMessage<BsonDocument>(_requestId, _collectionNamespace, _serializer, _documentSource, _maxBatchCount, _maxMessageSize, _continueOnError);
    var mockEncoderFactory = new Mock<IMessageEncoderFactory>();
    var encoder = new Mock<IMessageEncoder>().Object;
    mockEncoderFactory.Setup(f => f.GetInsertMessageEncoder(_serializer)).Returns(encoder);

    var result = subject.GetEncoder(mockEncoderFactory.Object);

    result.Should().BeSameAs(encoder);
}