/// <summary>
/// Deserialize a byte array into a BsonDocument.
/// </summary>
/// <param name="bson">Raw BSON bytes; must be non-null and non-empty.</param>
/// <returns>The deserialized document.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bson"/> is null or empty.</exception>
public static BsonDocument Deserialize(byte[] bson)
{
    // NOTE(review): an empty array arguably deserves ArgumentException, but the
    // original contract throws ArgumentNullException for both cases - kept so
    // callers catching ArgumentNullException keep working. nameof() replaces the
    // hardcoded "bson" string for refactor safety.
    if (bson == null || bson.Length == 0)
    {
        throw new ArgumentNullException(nameof(bson));
    }

    var reader = new BsonReader();

    return reader.Deserialize(bson);
}
/// <summary>
/// Deserialize a byte array into a BsonDocument.
/// </summary>
/// <param name="bson">Raw BSON bytes; must be non-null and non-empty.</param>
/// <returns>The deserialized document.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bson"/> is null or empty.</exception>
public static BsonDocument Deserialize(byte[] bson)
{
    // nameof() replaces the hardcoded "bson" string so renames stay in sync;
    // exception type kept as ArgumentNullException (original contract) even for
    // the empty-array case.
    if (bson == null || bson.Length == 0)
    {
        throw new ArgumentNullException(nameof(bson));
    }

    var reader = new BsonReader();

    return reader.Deserialize(bson);
}
/// <summary>
/// Deserialize a byte array into a BsonDocument.
/// </summary>
/// <param name="bson">Raw BSON bytes; must be non-null and non-empty.</param>
/// <param name="utcDate">Forwarded to the BsonReader - presumably controls whether
/// DateTime values are kept as UTC rather than converted to local time; confirm
/// against the BsonReader implementation.</param>
/// <returns>The deserialized document.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bson"/> is null or empty.</exception>
public static BsonDocument Deserialize(byte[] bson, bool utcDate = false)
{
    if (bson == null || bson.Length == 0) throw new ArgumentNullException(nameof(bson));

    return new BsonReader(utcDate).Deserialize(bson);
}
/// <summary>
/// Deserialize a byte array into a BsonDocument by reading it through a MemoryStream.
/// </summary>
/// <param name="bson">Raw BSON bytes; must be non-null and non-empty.</param>
/// <returns>The deserialized document.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bson"/> is null or empty.</exception>
public static BsonDocument Deserialize(byte[] bson)
{
    // nameof() replaces the hardcoded "bson" string; braces added to the guard.
    // ArgumentNullException kept for the empty-array case to preserve the
    // original contract for existing callers.
    if (bson == null || bson.Length == 0)
    {
        throw new ArgumentNullException(nameof(bson));
    }

    // using-statement guarantees the stream is disposed even if Deserialize throws
    using (var mem = new MemoryStream(bson))
    {
        var reader = new BsonReader();
        return reader.Deserialize(mem);
    }
}
/// <summary>
/// Deserialize a byte array into a BsonDocument by reading it through a MemoryStream.
/// </summary>
/// <param name="bson">Raw BSON bytes; must be non-null and non-empty.</param>
/// <returns>The deserialized document.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bson"/> is null or empty.</exception>
public static BsonDocument Deserialize(byte[] bson)
{
    // nameof() replaces the hardcoded "bson" string for refactor safety;
    // ArgumentNullException kept (original contract) even for the empty case.
    if (bson == null || bson.Length == 0)
    {
        throw new ArgumentNullException(nameof(bson));
    }

    // using-statement guarantees the stream is disposed even if Deserialize throws
    using (var mem = new MemoryStream(bson))
    {
        var reader = new BsonReader();
        return reader.Deserialize(mem);
    }
}
/// <summary>
/// Fetch documents from enumerator and add to buffer. If cache recycle, stop read to execute in another read.
/// </summary>
public void FetchCacheIDs(TransactionService trans, DataService data, BsonReader bsonReader)
{
    // start from an empty document buffer
    this.Clean();

    // keep reading until the transaction signals a checkpoint (cache recycle)
    while (trans.CheckPoint() == false)
    {
        this.HasMore = _nodes.MoveNext();

        // enumerator exhausted - nothing left to fetch
        if (!this.HasMore) return;

        // materialize the document stored behind the current index node
        var dataBlock = _nodes.Current.DataBlock;
        var doc = bsonReader.Deserialize(data.Read(dataBlock)).AsDocument;

        // full-scan mode: evaluate the filter here and drop non-matching documents
        if (_query.UseFilter && _query.FilterDocument(doc) == false)
        {
            continue;
        }

        // advance position cursor for every accepted document
        _position++;

        // keep only the first document seen for each id
        var id = doc[_LITEDB_CONST.FIELD_ID].AsString;

        if (this.CacheIDs.ContainsKey(id) == false)
        {
            this.CacheIDs.Add(id, doc);
        }
    }
}
/// <summary>
/// Fetch documents from enumerator and add to buffer. If cache recycle, stop read to execute in another read.
/// NOTE: _skip and _limit are member fields decremented in place, so skip/limit
/// progress persists across successive Fetch calls on the same instance.
/// </summary>
public void Fetch(TransactionService trans, DataService data, BsonReader bsonReader)
{
    // empty document buffer
    this.Documents.Clear();

    // while until must cache not recycle
    while (trans.CheckPoint() == false)
    {
        // read next node
        this.HasMore = _nodes.MoveNext();

        // if finish, exit loop
        if (this.HasMore == false) { return; }

        // if run ONLY under index, skip/limit before deserialize
        // (cheaper: avoids reading/deserializing documents that would be skipped anyway)
        if (_query.UseIndex && _query.UseFilter == false)
        {
            // --_skip >= 0 means there are still documents to skip
            if (--_skip >= 0) { continue; }

            // --_limit <= -1 means the limit was already exhausted before this document
            if (--_limit <= -1) { this.HasMore = false; return; }
        }

        // get current node
        var node = _nodes.Current;

        // read document from data block
        var buffer = data.Read(node.DataBlock);
        var doc = bsonReader.Deserialize(buffer).AsDocument;

        // if need run in full scan, execute full scan and test return
        if (_query.UseFilter)
        {
            // execute query condition here - if false, do not add on final results
            if (_query.FilterDocument(doc) == false) { continue; }

            // implement skip/limit after deserialize in full scan
            // (filter must see every document, so skip/limit only count matches)
            if (--_skip >= 0) { continue; }

            if (--_limit <= -1) { this.HasMore = false; return; }
        }

        // increment position cursor
        _position++;

        // avoid lock again just to check limit
        // (_limit reached exactly 0 on this document: it is the last one to return)
        if (_limit == 0) { this.HasMore = false; }

        this.Documents.Add(doc);
    }
}