public DataService(Snapshot snapshot)
{
    _snapshot = snapshot;
}
public IndexService(Snapshot snapshot, Collation collation)
{
    _snapshot = snapshot;
    _collation = collation;
}
/// <summary>
/// Implement internal update document
/// </summary>
private bool UpdateDocument(Snapshot snapshot, CollectionPage col, BsonDocument doc, IndexService indexer, DataService data)
{
    // get _id value from document before find
    var id = doc["_id"];

    // validate id for null, min/max values
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    // find indexNode from pk index
    var pkNode = indexer.Find(col.PK, id, false, LiteDB.Query.Ascending);

    // if document was not found, there is nothing to update
    if (pkNode == null) return false;

    // update data storage
    data.Update(col, pkNode.DataBlock, doc);

    // get all current non-pk index nodes from this data block (slot, key, nodePosition)
    var oldKeys = indexer.GetNodeList(pkNode.NextNode)
        .Select(x => new Tuple<byte, BsonValue, PageAddress>(x.Slot, x.Key, x.Position))
        .ToArray();

    // build a list of all new index keys
    var newKeys = new List<Tuple<byte, BsonValue, string>>();

    foreach (var index in col.GetCollectionIndexes().Where(x => x.Name != "_id"))
    {
        // get all keys by running the index expression over the document
        var keys = index.BsonExpr.Execute(doc);

        foreach (var key in keys)
        {
            newKeys.Add(new Tuple<byte, BsonValue, string>(index.Slot, key, index.Name));
        }
    }

    if (oldKeys.Length == 0 && newKeys.Count == 0) return true;

    // get a list of all nodes that are in oldKeys but not in newKeys (must delete)
    var toDelete = new HashSet<PageAddress>(oldKeys
        .Where(x => newKeys.Any(n => n.Item1 == x.Item1 && n.Item2 == x.Item2) == false)
        .Select(x => x.Item3));

    // get a list of all keys that are not in oldKeys (must insert)
    var toInsert = newKeys
        .Where(x => oldKeys.Any(o => o.Item1 == x.Item1 && o.Item2 == x.Item2) == false)
        .ToArray();

    // if nothing to change, just exit
    if (toDelete.Count == 0 && toInsert.Length == 0) return true;

    // delete nodes and return the last kept node in the list
    var last = indexer.DeleteList(pkNode.Position, toDelete);

    // now, insert all new nodes
    foreach (var elem in toInsert)
    {
        var index = col.GetCollectionIndex(elem.Item3);

        last = indexer.AddNode(index, elem.Item2, pkNode.DataBlock, last);
    }

    return true;
}
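// The key step in UpdateDocument above is the secondary-index diff: only (slot, key) pairs that
// actually changed are deleted from or inserted into the indexes. The snippet below is a minimal,
// self-contained sketch of that set-difference idea using plain value tuples instead of
// IndexNode/PageAddress; the IndexDiffDemo class and its sample values are illustrative only and
// are not part of LiteDB.
using System;
using System.Linq;

public static class IndexDiffDemo
{
    public static void Main()
    {
        // (slot, key) pairs currently stored in the secondary indexes for a document
        var oldKeys = new[] { (Slot: (byte)1, Key: "ana"), (Slot: (byte)2, Key: "rio") };

        // (slot, key) pairs produced by re-running each index expression over the updated document
        var newKeys = new[] { (Slot: (byte)1, Key: "ana"), (Slot: (byte)2, Key: "sp") };

        // keys present before but not after the update must be removed from the index
        var toDelete = oldKeys.Where(o => !newKeys.Any(n => n.Slot == o.Slot && n.Key == o.Key));

        // keys present after but not before must be inserted
        var toInsert = newKeys.Where(n => !oldKeys.Any(o => o.Slot == n.Slot && o.Key == n.Key));

        Console.WriteLine($"delete: {string.Join(", ", toDelete)}"); // (2, rio)
        Console.WriteLine($"insert: {string.Join(", ", toInsert)}"); // (2, sp)
    }
}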
public CollectionService(HeaderPage header, Snapshot snapshot, TransactionPages transPages)
{
    _snapshot = snapshot;
    _header = header;
    _transPages = transPages;
}
/// <summary>
/// INCLUDE: Do include in result document according to path expression - works only with DocumentLookup
/// </summary>
protected IEnumerable<BsonDocument> Include(IEnumerable<BsonDocument> source, BsonExpression path)
{
    // cached services
    string last = null;
    Snapshot snapshot = null;
    IndexService indexer = null;
    DataService data = null;
    CollectionIndex index = null;
    IDocumentLookup lookup = null;

    foreach (var doc in source)
    {
        foreach (var value in path.Execute(doc, _pragmas.Collation)
            .Where(x => x.IsDocument || x.IsArray)
            .ToList())
        {
            // if value is a document, convert this ref document into a full document (do another query)
            if (value.IsDocument)
            {
                DoInclude(value.AsDocument);
            }
            else
            {
                // if value is an array, do the same per item
                foreach (var item in value.AsArray
                    .Where(x => x.IsDocument)
                    .Select(x => x.AsDocument))
                {
                    DoInclude(item);
                }
            }
        }

        yield return doc;
    }

    void DoInclude(BsonDocument value)
    {
        // works only if value is a document
        var refId = value["$id"];
        var refCol = value["$ref"];

        // if it has no reference, just return
        if (refId.IsNull || !refCol.IsString) return;

        // re-use cached services when it is the same $ref (almost always is the same $ref collection)
        if (last != refCol.AsString)
        {
            last = refCol.AsString;

            // initialize services
            snapshot = _transaction.CreateSnapshot(LockMode.Read, last, false);
            indexer = new IndexService(snapshot, _pragmas.Collation);
            data = new DataService(snapshot);
            lookup = new DatafileLookup(data, _pragmas.UtcDate, null);
            index = snapshot.CollectionPage?.PK;
        }

        // fill only if index and ref node exist
        if (index != null)
        {
            var node = indexer.Find(index, refId, false, Query.Ascending);

            if (node != null)
            {
                // load document based on dataBlock position
                var refDoc = lookup.Load(node);

                value.Remove("$id");
                value.Remove("$ref");

                refDoc.CopyTo(value);
            }
            else
            {
                // mark in the ref document that the referenced document was not found
                value.Add("$missing", true);
            }
        }
    }
}
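// For context, Include above expects the value at the path to be a DBRef-style sub-document of the
// form { "$ref": "<collection>", "$id": <pk value> }. Below is a hedged usage sketch with the public
// LiteDB API that produces and resolves such references at the BsonDocument level; the file name,
// collection names and field names are illustrative assumptions, not taken from the code above.
using LiteDB;

class IncludeDemo
{
    static void Main()
    {
        using var db = new LiteDatabase("demo.db");

        var customers = db.GetCollection("customers");
        var orders = db.GetCollection("orders");

        customers.Insert(new BsonDocument { ["_id"] = 1, ["name"] = "Ana" });

        // store only the reference; Include resolves it at query time
        orders.Insert(new BsonDocument
        {
            ["_id"] = 10,
            ["customer"] = new BsonDocument { ["$ref"] = "customers", ["$id"] = 1 }
        });

        foreach (var doc in orders.Include("$.customer").FindAll())
        {
            // doc["customer"] now contains the full customer document
            // (or keeps the ref plus "$missing": true when the referenced _id was not found)
            System.Console.WriteLine(doc.ToString());
        }
    }
}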
public IndexService(Snapshot snapshot)
{
    _snapshot = snapshot;
}
private IEnumerable<BsonDocument> GetList(uint pageID, string indexName, TransactionService transaction, Snapshot snapshot)
{
    if (pageID == uint.MaxValue) yield break;

    var page = snapshot.GetPage<BasePage>(pageID);

    while (page != null)
    {
        _collections.TryGetValue(page.ColID, out var collection);

        yield return new BsonDocument
        {
            ["pageID"] = (int)page.PageID,
            ["pageType"] = page.PageType.ToString(),
            ["slot"] = (int)page.PageListSlot,
            ["collection"] = collection,
            ["index"] = indexName,
            ["freeBytes"] = page.FreeBytes,
            ["itemsCount"] = (int)page.ItemsCount
        };

        if (page.NextPageID == uint.MaxValue) break;

        transaction.Safepoint();

        page = snapshot.GetPage<BasePage>(page.NextPageID);
    }
}
private IEnumerable<BsonDocument> GetAllList(TransactionService transaction, Snapshot snapshot)
{
    // get empty page list, from header
    foreach (var page in this.GetList(_header.FreeEmptyPageList, null, transaction, snapshot))
    {
        yield return page;
    }

    // get lists from data pages/index list
    foreach (var collection in _collections)
    {
        var snap = transaction.CreateSnapshot(LockMode.Read, collection.Value, false);

        for (var slot = 0; slot < PAGE_FREE_LIST_SLOTS; slot++)
        {
            var result = this.GetList(snap.CollectionPage.FreeDataPageList[slot], null, transaction, snap);

            foreach (var page in result)
            {
                yield return page;
            }
        }

        var indexes = snap.CollectionPage.GetCollectionIndexes().ToArray();

        foreach (var index in indexes)
        {
            var result = this.GetList(index.FreeIndexPageList, index.Name, transaction, snap);

            foreach (var page in result)
            {
                yield return page;
            }
        }
    }
}
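// GetList and GetAllList above back one of LiteDB's read-only, $-prefixed system collections that
// exposes the engine's free-page lists (pageID, pageType, slot, collection, index, freeBytes,
// itemsCount) for diagnostics. A hedged consumption sketch follows; the collection name
// "$page_list" and the file name are assumptions and may differ between LiteDB versions.
using LiteDB;

class PageListDemo
{
    static void Main()
    {
        using var db = new LiteDatabase("demo.db");

        using var reader = db.Execute("SELECT $ FROM $page_list");

        while (reader.Read())
        {
            var page = reader.Current.AsDocument;

            // each row is one page-info document produced by GetList above
            System.Console.WriteLine($"{page["pageID"]} {page["pageType"]} free={page["freeBytes"]}");
        }
    }
}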