public CollectionService(PageService pager, IndexService indexer, DataService data, TransactionService trans, Logger log)
{
    _pager = pager;
    _indexer = indexer;
    _data = data;
    _trans = trans;
    _log = log;
}
/// <summary>
/// Create instances for all engine services
/// </summary>
private void InitializeServices()
{
    _pager = new PageService(_disk, _crypto, _log);
    _indexer = new IndexService(_pager, _log);
    _data = new DataService(_pager, _log);
    _trans = new TransactionService(_disk, _crypto, _pager, _cacheSize, _log);
    _collections = new CollectionService(_pager, _indexer, _data, _trans, _log);
}
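// A minimal sketch (assumed, not the engine's actual constructor) of how the fields
// consumed by InitializeServices would be populated before it runs. The names
// `DemoEngine`, `IDiskService` and `AesEncryption` are illustrative assumptions
// inferred from the fields used above.
public DemoEngine(IDiskService disk, AesEncryption crypto, int cacheSize, Logger log)
{
    _disk = disk;
    _crypto = crypto;
    _cacheSize = cacheSize;
    _log = log;

    // services are created in dependency order: the pager first, then the index/data
    // services built on top of it, and finally the collection service that uses them all
    this.InitializeServices();
}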
/// <summary>
/// Fetch documents from the enumerator and add them to the buffer. If the cache must be
/// recycled, stop reading and resume in a later call.
/// </summary>
public void Fetch(TransactionService trans, DataService data, BsonReader bsonReader)
{
    // empty document buffer
    this.Documents.Clear();

    // loop until the cache must be recycled
    while (trans.CheckPoint() == false)
    {
        // read next node
        this.HasMore = _nodes.MoveNext();

        // if enumeration is finished, exit loop
        if (this.HasMore == false) return;

        // when running ONLY over the index (no filter), apply skip/limit before deserialization
        if (_query.UseIndex && _query.UseFilter == false)
        {
            if (--_skip >= 0) continue;

            if (--_limit <= -1)
            {
                this.HasMore = false;
                return;
            }
        }

        // get current node
        var node = _nodes.Current;

        // read document from data block
        var buffer = data.Read(node.DataBlock);
        var doc = bsonReader.Deserialize(buffer).AsDocument;

        // when a full scan is required, test the filter against the deserialized document
        if (_query.UseFilter)
        {
            // execute query condition here - if false, do not add to final results
            if (_query.FilterDocument(doc) == false) continue;

            // apply skip/limit after deserialization in full scan
            if (--_skip >= 0) continue;

            if (--_limit <= -1)
            {
                this.HasMore = false;
                return;
            }
        }

        // increment position cursor
        _position++;

        // avoid locking again just to check limit
        if (_limit == 0) this.HasMore = false;

        this.Documents.Add(doc);
    }
}
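// A minimal usage sketch (assumed caller; the enclosing cursor type, here called
// `QueryCursor`, is also an assumption): Fetch fills the Documents buffer up to the
// next checkpoint, so a caller drains it in rounds until HasMore reports false.
private static List<BsonDocument> ReadAll(QueryCursor cursor, TransactionService trans, DataService data, BsonReader bsonReader)
{
    var results = new List<BsonDocument>();

    do
    {
        // fills cursor.Documents until the next cache checkpoint or until skip/limit is exhausted
        cursor.Fetch(trans, data, bsonReader);

        // copy the batch out - the next Fetch call clears the buffer
        results.AddRange(cursor.Documents);
    }
    while (cursor.HasMore);

    return results;
}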
public IEnumerable<BsonDocument> GetDocuments(TransactionService trans, DataService data, Logger log)
{
    if (_skip > 0) log.Write(Logger.QUERY, "skipping {0} documents", _skip);

    // loop until the cache must be recycled
    while (trans.CheckPoint() == false)
    {
        // read next node
        this.HasMore = this.Nodes.MoveNext();

        // if enumeration is finished, exit loop
        if (this.HasMore == false) yield break;

        // when running ONLY over the index (no filter), apply skip/limit before deserialization
        if (_query.UseIndex && _query.UseFilter == false)
        {
            if (--_skip >= 0) continue;

            if (--_limit <= -1)
            {
                this.HasMore = false;
                yield break;
            }
        }

        // get current node
        var node = this.Nodes.Current;

        // read document from data block
        var buffer = data.Read(node.DataBlock);
        var doc = BsonSerializer.Deserialize(buffer).AsDocument;

        // when a full scan is required, test the filter against the deserialized document
        if (_query.UseFilter)
        {
            // execute query condition here - if false, do not add to final results
            if (_query.FilterDocument(doc) == false) continue;

            // apply skip/limit after deserialization in full scan
            if (--_skip >= 0) continue;

            if (--_limit <= -1)
            {
                this.HasMore = false;
                yield break;
            }
        }

        // increment position cursor
        _position++;

        // avoid locking again just to check limit
        if (_limit == 0) this.HasMore = false;

        yield return doc;
    }
}
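// The `--_skip >= 0` / `--_limit <= -1` pre-decrement pattern above is easy to misread;
// this standalone sketch (illustration only, not engine code) shows its exact semantics:
// consume `skip` items silently, then stop after `limit` items have been yielded.
static IEnumerable<int> SkipLimit(IEnumerable<int> source, int skip, int limit)
{
    foreach (var item in source)
    {
        if (--skip >= 0) continue;      // still inside the skip window
        if (--limit <= -1) yield break; // limit exhausted

        yield return item;
    }
}

// SkipLimit(Enumerable.Range(1, 10), skip: 2, limit: 3) yields 3, 4, 5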
public IEnumerable<BsonDocument> GetDocuments(TransactionService trans, DataService data, Logger log)
{
    var index = _bufferSize;

    while (index > 0)
    {
        // check whether the cache is full and must be recycled
        trans.CheckPoint();

        // read next node
        this.HasMore = this.Nodes.MoveNext();

        // if enumeration is finished, exit loop
        if (this.HasMore == false) yield break;

        // when running over the index, apply skip/limit before deserialization
        if (_query.UseIndex)
        {
            if (--_skip >= 0) continue;

            if (--_limit <= -1)
            {
                this.HasMore = false;
                yield break;
            }
        }

        // get current node
        var node = this.Nodes.Current;

        // read document from data block
        var buffer = data.Read(node.DataBlock);
        var doc = BsonSerializer.Deserialize(buffer).AsDocument;

        // when a full scan is required, test the filter against the deserialized document
        if (_query.UseFilter)
        {
            // execute query condition here - if false, do not add to final results
            if (_query.FilterDocument(doc) == false) continue;

            // apply skip/limit after deserialization in full scan
            if (--_skip >= 0) continue;

            if (--_limit <= -1)
            {
                this.HasMore = false;
                yield break;
            }
        }

        index--;

        // increment document counter
        _counter++;

        // avoid locking again just to check limit
        if (_limit == 0) this.HasMore = false;

        yield return doc;
    }

    // for the next run, skip everything already returned plus the initial skip,
    // because node enumeration will restart from the beginning
    _skip = _counter + _initialSkip;
}
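// Worked example (assumed values) of the trailing `_skip = _counter + _initialSkip`
// re-positioning in the buffered variant: node enumeration restarts from the beginning
// on every run, so each new run must skip everything already returned plus the user's
// original skip.
//
//   user query: Skip(10), _bufferSize = 100
//   run 1: _skip starts at 10, returns documents 11..110, _counter reaches 100
//          -> next _skip = 100 + 10 = 110
//   run 2: skips the 110 documents already consumed, returns documents 111..210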