// public methods
/// <summary>
/// Executes the bulk operation using the default write concern from the collection.
/// A bulk operation may be executed at most once.
/// </summary>
/// <returns>A BulkWriteResult.</returns>
/// <exception cref="InvalidOperationException">Thrown if the operation has already been executed.</exception>
public BulkWriteResult Execute()
{
    // Enforce single-use semantics, consistent with ExecuteHelper: re-running
    // the same bulk operation would silently re-submit all queued requests.
    if (_hasBeenExecuted)
    {
        throw new InvalidOperationException("The bulk write operation has already been executed.");
    }
    _hasBeenExecuted = true;

    var args = new BulkWriteArgs
    {
        IsOrdered = _isOrdered,
        Requests = _requests
    };
    return _collection.BulkWrite(args);
}
/// <summary>
/// Synchronously inserts all given entities in a single unordered bulk write.
/// </summary>
/// <param name="entities">The entities to insert. Must not be null; an empty collection is a no-op.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="entities"/> is null.</exception>
public void BulkInsertSync(ICollection<T> entities)
{
    if (entities == null)
    {
        throw new ArgumentNullException(nameof(entities));
    }
    // The MongoDB driver rejects a bulk write containing zero requests,
    // so an empty input must return early instead of throwing.
    if (entities.Count == 0)
    {
        return;
    }
    var stores = entities
        .Select(x => (WriteModel<T>)new InsertOneModel<T>(x))
        .ToList();
    MongoCollection.BulkWrite(stores);
}
/// <summary>
/// Executes the bulk operation with an explicit write concern.
/// </summary>
/// <param name="writeConcern">The write concern to apply to the bulk write.</param>
/// <returns>A BulkWriteResult.</returns>
/// <exception cref="InvalidOperationException">Thrown if the operation has already been executed.</exception>
private BulkWriteResult ExecuteHelper(WriteConcern writeConcern)
{
    // A bulk operation is single-use: reject any repeat execution.
    if (_hasBeenExecuted)
    {
        throw new InvalidOperationException("The bulk write operation has already been executed.");
    }
    _hasBeenExecuted = true;

    var bulkWriteArgs = new BulkWriteArgs
    {
        IsOrdered = _isOrdered,
        WriteConcern = writeConcern,
        Requests = _requests
    };
    return _collection.BulkWrite(bulkWriteArgs);
}
/// <summary>
/// Demonstrates calling BulkWrite directly in order to pass additional arguments.
/// Disables element-name checking and submits a document with an invalid
/// element name, then logs the resulting bulk write errors before rethrowing.
/// </summary>
public void ShowOffDirectBulkWriteToSpecifyOtherArguments()
{
    // CheckElementNames = false lets the "$invalid" key reach the server,
    // which then reports it as a write error.
    var bulkWriteArgs = new BulkWriteArgs { CheckElementNames = false };
    var invalidInsert = new InsertRequest(typeof(BsonDocument), new BsonDocument { { "$invalid", 1 } });
    try
    {
        _Collection.BulkWrite(bulkWriteArgs, invalidInsert);
    }
    catch (BulkWriteException exception)
    {
        WriteErrors(exception);
        throw; // preserve the original stack trace
    }
}
/// <summary>
/// Inserts the given elements in unordered bulk writes of at most
/// <paramref name="bulkSize"/> documents each.
/// see: http://stackoverflow.com/questions/8859533/adding-batch-upsert-to-mongodb
/// </summary>
/// <param name="elements">The elements to insert. Must not be null.</param>
/// <param name="bulkSize">Maximum number of documents per bulk write; must be at least 1.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="elements"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when <paramref name="bulkSize"/> is less than 1.</exception>
protected void BulkWrite(IEnumerable<T> elements, int bulkSize)
{
    if (elements == null)
    {
        throw new ArgumentNullException("elements");
    }
    if (bulkSize < 1)
    {
        throw new InvalidOperationException("bulkSize is smaller than 1.");
    }

    var cache = new WriteModel<T>[bulkSize];
    var options = new BulkWriteOptions { BypassDocumentValidation = true, IsOrdered = false };
    // Number of models currently buffered in cache.
    // NOTE: the original indexed with the BulkSize property but flushed on the
    // bulkSize parameter — a mismatch that corrupted batches; use the parameter
    // consistently.
    var count = 0;
    foreach (var item in elements)
    {
        cache[count] = new InsertOneModel<T>(item);
        count++;
        if (count == bulkSize)
        {
            MongoCollection.BulkWrite(cache, options);
            count = 0;
        }
    }
    // Flush the trailing partial batch; the original dropped these elements
    // whenever the element count was not an exact multiple of bulkSize.
    if (count > 0)
    {
        MongoCollection.BulkWrite(cache.Take(count), options);
    }
}