/// <summary>
/// Consumes the next batch of chunk documents from <c>_cursor</c>, validating chunk
/// numbering and size, and advances the streaming state (<c>_batchPosition</c>,
/// <c>_nextChunkNumber</c>, optional MD5 accumulation).
/// </summary>
/// <param name="hasMore">Whether the cursor produced another batch.</param>
/// <exception cref="GridFSChunkException">
/// Thrown when an expected chunk is missing or a chunk has an unexpected length.
/// </exception>
private void GetNextBatchFromCursor(bool hasMore)
{
    if (!hasMore)
    {
        // Cursor exhausted before we saw every expected chunk.
#pragma warning disable 618
        throw new GridFSChunkException(_idAsBsonValue, _nextChunkNumber, "missing");
#pragma warning restore 618
    }

    var previousBatch = _batch;
    _batch = _cursor.Current.ToList();

    if (previousBatch != null)
    {
        // Advance past the batch we just finished (fixed stray double semicolon).
        _batchPosition += previousBatch.Count * FileInfo.ChunkSizeBytes;
    }

    // NOTE(review): presumably a zero-length chunk numbered one past the last
    // real chunk can appear at the end of a batch and should be ignored —
    // confirm against the server/driver behavior this guards.
    var lastChunkInBatch = _batch.Last();
    if (lastChunkInBatch["n"].ToInt32() == _lastChunkNumber + 1 &&
        lastChunkInBatch["data"].AsBsonBinaryData.Bytes.Length == 0)
    {
        _batch.RemoveAt(_batch.Count - 1);
    }

    foreach (var chunk in _batch)
    {
        var n = chunk["n"].ToInt32();
        var bytes = chunk["data"].AsBsonBinaryData.Bytes;

        if (n != _nextChunkNumber)
        {
            // Chunks must arrive in strictly increasing, gap-free order.
#pragma warning disable 618
            throw new GridFSChunkException(_idAsBsonValue, _nextChunkNumber, "missing");
#pragma warning restore 618
        }
        _nextChunkNumber++;

        // Every chunk except the last must be exactly ChunkSizeBytes long;
        // the last chunk must be exactly _lastChunkSize.
        var expectedChunkSize = n == _lastChunkNumber ? _lastChunkSize : FileInfo.ChunkSizeBytes;
        if (bytes.Length != expectedChunkSize)
        {
            // Report chunk 'n' — the chunk that is actually the wrong size.
            // The original reported _nextChunkNumber, which was already
            // incremented above and therefore named the wrong chunk.
#pragma warning disable 618
            throw new GridFSChunkException(_idAsBsonValue, n, "the wrong size");
#pragma warning restore 618
        }

        if (_checkMD5)
        {
            _md5.AppendData(bytes, 0, bytes.Length);
        }
    }
}
/// <summary>
/// Builds one chunk document per byte[] buffer in the current write batch,
/// advancing <c>_batchPosition</c> by each buffer's length and folding the
/// bytes into the running MD5 accumulator.
/// </summary>
/// <returns>The chunk documents ready to be inserted into the chunks collection.</returns>
private IEnumerable<BsonDocument> CreateWriteBatchChunkDocuments()
{
    var documents = new List<BsonDocument>();

    // The chunk number of the first buffer in this batch is derived from the
    // byte position already written.
    var chunkNumber = (int)(_batchPosition / _chunkSizeBytes);

    foreach (var buffer in _batch)
    {
        var document = new BsonDocument
        {
            { "_id", ObjectId.GenerateNewId() },
            { "files_id", _idAsBsonValue },
            { "n", chunkNumber++ },
            { "data", new BsonBinaryData(buffer, BsonBinarySubType.Binary) }
        };
        documents.Add(document);

        _batchPosition += buffer.Length;
        _md5.AppendData(buffer, 0, buffer.Length);
    }

    return documents;
}