// internal methods
/// <summary>
/// Writes the body of the message: streams insert requests until a batch limit
/// is crossed, recording progress and any continuation batch in _batchProgress.
/// </summary>
/// <param name="streamWriter">Destination writer for the serialized requests.</param>
internal override void WriteBodyTo(BsonStreamWriter streamWriter)
{
    // Requests actually emitted into this batch, for progress reporting.
    var written = new List<InsertRequest>();

    // A continuation batch carries the single document that overflowed the
    // previous batch; write it first so it leads this batch.
    var continuation = _batch as ContinuationBatch<InsertRequest, byte[]>;
    if (continuation != null)
    {
        AddOverflow(streamWriter, continuation.PendingState);
        written.Add(continuation.PendingItem);
        continuation.ClearPending(); // lets pending objects be garbage collected sooner
    }

    // Deliberately write one document past the limits so IsDone can be set as
    // early as possible; the overflowing document is then pulled back out.
    var enumerator = _batch.Enumerator;
    while (enumerator.MoveNext())
    {
        var current = enumerator.Current;
        AddRequest(streamWriter, current);

        var overLimit = _batchCount > _maxBatchCount || _batchLength > _maxBatchLength;
        if (overLimit && _batchCount > 1)
        {
            // Back the last document out of the stream and hand it, together
            // with the unconsumed enumerator, to the next batch.
            var overflowDocument = RemoveLastDocument(streamWriter.BaseStream);
            var remainder = new ContinuationBatch<InsertRequest, byte[]>(enumerator, current, overflowDocument);
            _batchProgress = new BatchProgress<InsertRequest>(_batchCount, _batchLength, written, remainder);
            return;
        }

        written.Add(current);
    }

    // Everything fit: report completion with no continuation batch.
    _batchProgress = new BatchProgress<InsertRequest>(_batchCount, _batchLength, written, null);
}
// Training monitor
/// <summary>
/// Intentionally a no-op. The console progress-bar rendering that used to live
/// here was left behind as commented-out dead code and has been removed; the
/// method is kept so existing callers that report batch progress still compile.
/// </summary>
/// <param name="progress">Batch progress snapshot (currently unused).</param>
private static void TrackBatchProgress(BatchProgress progress)
{
}
// public methods
/// <summary>
/// Serializes a batch of write requests, stopping (one request past the limit)
/// when the batch count or length limit is exceeded, and records progress plus
/// any continuation batch in _batchProgress.
/// </summary>
/// <param name="bsonWriter">The writer; must be a BsonBinaryWriter.</param>
/// <param name="nominalType">The nominal type (unused directly).</param>
/// <param name="value">The Batch&lt;WriteRequest&gt; to serialize.</param>
/// <param name="options">Serialization options (unused directly).</param>
public override void Serialize(BsonWriter bsonWriter, Type nominalType, object value, IBsonSerializationOptions options)
{
    var batch = (Batch<WriteRequest>)value;
    var binaryWriter = (BsonBinaryWriter)bsonWriter;
    _batchStartPosition = binaryWriter.Buffer.Position;

    // Requests actually emitted into this batch, for progress reporting.
    var written = new List<WriteRequest>();

    // A continuation batch carries the single request that overflowed the
    // previous batch; write it first so it leads this batch.
    var continuation = batch as ContinuationBatch<WriteRequest, IByteBuffer>;
    if (continuation != null)
    {
        // NOTE(review): "AddOverfow" looks misspelled (cf. AddOverflow elsewhere
        // in this file); the call must match the declared name, so it is kept.
        AddOverfow(binaryWriter, continuation.PendingState);
        written.Add(continuation.PendingItem);
        continuation.ClearPending(); // lets pending objects be garbage collected sooner
    }

    // Clamp the batch length to MaxDocumentSize (not MaxWireDocumentSize —
    // the difference leaves room for message overhead).
    var maxBatchLength = _args.MaxBatchLength;
    if (_args.MaxDocumentSize < maxBatchLength)
    {
        maxBatchLength = _args.MaxDocumentSize;
    }

    // Deliberately write one request past the limits so IsDone can be set as
    // early as possible; the overflowing request is then pulled back out.
    var enumerator = batch.Enumerator;
    while (enumerator.MoveNext())
    {
        var current = enumerator.Current;
        AddRequest(binaryWriter, current);

        var overLimit = _batchCount > _args.MaxBatchCount || _batchLength > maxBatchLength;
        if (overLimit && _batchCount > 1)
        {
            // Back the last request out of the buffer and hand it, together
            // with the unconsumed enumerator, to the next batch.
            var overflowRequest = RemoveOverflow(binaryWriter.Buffer);
            var remainder = new ContinuationBatch<WriteRequest, IByteBuffer>(enumerator, current, overflowRequest);
            _batchProgress = new BatchProgress<WriteRequest>(_batchCount, _batchLength, written, remainder);
            return;
        }

        written.Add(current);
    }

    // Everything fit: report completion with no continuation batch.
    _batchProgress = new BatchProgress<WriteRequest>(_batchCount, _batchLength, written, null);
}
// public methods
/// <summary>
/// Serializes a batch of write requests, stopping (one request past the limit)
/// when the batch count or length limit is exceeded, and records progress plus
/// any continuation batch in _batchProgress.
/// </summary>
/// <param name="context">Serialization context; its Writer must be a BsonBinaryWriter.</param>
/// <param name="batch">The batch of write requests to serialize.</param>
public override void Serialize(BsonSerializationContext context, Batch<WriteRequest> batch)
{
    var binaryWriter = (BsonBinaryWriter)context.Writer;
    _batchStartPosition = (int)binaryWriter.Stream.Position;

    // Requests actually emitted into this batch, for progress reporting.
    var written = new List<WriteRequest>();

    // A continuation batch carries the single request that overflowed the
    // previous batch; write it first so it leads this batch.
    var continuation = batch as ContinuationBatch<WriteRequest, IByteBuffer>;
    if (continuation != null)
    {
        // NOTE(review): "AddOverfow" looks misspelled (cf. AddOverflow elsewhere
        // in this file); the call must match the declared name, so it is kept.
        AddOverfow(binaryWriter, continuation.PendingState);
        written.Add(continuation.PendingItem);
        continuation.ClearPending(); // lets pending objects be garbage collected sooner
    }

    // Deliberately write one request past the limits so IsDone can be set as
    // early as possible; the overflowing request is then pulled back out.
    for (var enumerator = batch.Enumerator; enumerator.MoveNext(); )
    {
        var current = enumerator.Current;
        AddRequest(binaryWriter, current);

        var overLimit = _batchCount > _maxBatchCount || _batchLength > _maxBatchLength;
        if (overLimit && _batchCount > 1)
        {
            // Back the last request out of the stream and hand it, together
            // with the unconsumed enumerator, to the next batch.
            var overflowRequest = RemoveOverflow(binaryWriter.Stream);
            var remainder = new ContinuationBatch<WriteRequest, IByteBuffer>(enumerator, current, overflowRequest);
            _batchProgress = new BatchProgress<WriteRequest>(_batchCount, _batchLength, written, remainder);
            return;
        }

        written.Add(current);
    }

    // Everything fit: report completion with no continuation batch.
    _batchProgress = new BatchProgress<WriteRequest>(_batchCount, _batchLength, written, null);
}
// public methods
/// <summary>
/// Serializes a batch of write requests, stopping (one request past the limit)
/// when the batch count or length limit is exceeded, and records progress plus
/// any continuation batch in _batchProgress.
/// </summary>
/// <param name="bsonWriter">The writer; must be a BsonBinaryWriter.</param>
/// <param name="nominalType">The nominal type (unused directly).</param>
/// <param name="value">The Batch&lt;WriteRequest&gt; to serialize.</param>
/// <param name="options">Serialization options (unused directly).</param>
public override void Serialize(BsonWriter bsonWriter, Type nominalType, object value, IBsonSerializationOptions options)
{
    var batch = (Batch<WriteRequest>)value;
    var binaryWriter = (BsonBinaryWriter)bsonWriter;
    _batchStartPosition = binaryWriter.Buffer.Position;

    // Requests actually emitted into this batch, for progress reporting.
    var written = new List<WriteRequest>();

    // A continuation batch carries the single request that overflowed the
    // previous batch; write it first so it leads this batch.
    var continuation = batch as ContinuationBatch<WriteRequest, IByteBuffer>;
    if (continuation != null)
    {
        // NOTE(review): "AddOverfow" looks misspelled (cf. AddOverflow elsewhere
        // in this file); the call must match the declared name, so it is kept.
        AddOverfow(binaryWriter, continuation.PendingState);
        written.Add(continuation.PendingItem);
        continuation.ClearPending(); // lets pending objects be garbage collected sooner
    }

    // Deliberately write one request past the limits so IsDone can be set as
    // early as possible; the overflowing request is then pulled back out.
    for (var enumerator = batch.Enumerator; enumerator.MoveNext(); )
    {
        var current = enumerator.Current;
        AddRequest(binaryWriter, current);

        var overLimit = _batchCount > _maxBatchCount || _batchLength > _maxBatchLength;
        if (overLimit && _batchCount > 1)
        {
            // Back the last request out of the buffer and hand it, together
            // with the unconsumed enumerator, to the next batch.
            var overflowRequest = RemoveOverflow(binaryWriter.Buffer);
            var remainder = new ContinuationBatch<WriteRequest, IByteBuffer>(enumerator, current, overflowRequest);
            _batchProgress = new BatchProgress<WriteRequest>(_batchCount, _batchLength, written, remainder);
            return;
        }

        written.Add(current);
    }

    // Everything fit: report completion with no continuation batch.
    _batchProgress = new BatchProgress<WriteRequest>(_batchCount, _batchLength, written, null);
}
// internal methods
/// <summary>
/// Writes the body of the message: streams insert requests until a batch limit
/// is crossed, recording progress and any continuation batch in _batchProgress.
/// </summary>
/// <param name="streamWriter">Destination writer for the serialized requests.</param>
internal override void WriteBodyTo(BsonStreamWriter streamWriter)
{
    // Requests actually emitted into this batch, for progress reporting.
    var emitted = new List<InsertRequest>();

    // A continuation batch carries the single document that overflowed the
    // previous batch; write it first so it leads this batch.
    var pendingBatch = _batch as ContinuationBatch<InsertRequest, byte[]>;
    if (pendingBatch != null)
    {
        AddOverflow(streamWriter, pendingBatch.PendingState);
        emitted.Add(pendingBatch.PendingItem);
        pendingBatch.ClearPending(); // lets pending objects be garbage collected sooner
    }

    // Deliberately write one document past the limits so IsDone can be set as
    // early as possible; the overflowing document is then pulled back out.
    for (var enumerator = _batch.Enumerator; enumerator.MoveNext(); )
    {
        var request = enumerator.Current;
        AddRequest(streamWriter, request);

        var limitExceeded = _batchCount > _maxBatchCount || _batchLength > _maxBatchLength;
        if (limitExceeded && _batchCount > 1)
        {
            // Back the last document out of the stream and hand it, together
            // with the unconsumed enumerator, to the next batch.
            var overflowBytes = RemoveLastDocument(streamWriter.BaseStream);
            var nextBatch = new ContinuationBatch<InsertRequest, byte[]>(enumerator, request, overflowBytes);
            _batchProgress = new BatchProgress<InsertRequest>(_batchCount, _batchLength, emitted, nextBatch);
            return;
        }

        emitted.Add(request);
    }

    // Everything fit: report completion with no continuation batch.
    _batchProgress = new BatchProgress<InsertRequest>(_batchCount, _batchLength, emitted, null);
}