/// <summary>
/// Delete a page - this page will be marked as Empty page
/// There is no re-use deleted page in same transaction - deleted pages will be in another linked list and will
/// be part of Header free list page only in commit
/// </summary>
private void DeletePage<T>(T page)
    where T : BasePage
{
    // a page may only be deleted when it is fully detached from any linked list and completely empty
    ENSURE(page.PrevPageID == uint.MaxValue && page.NextPageID == uint.MaxValue, "before delete a page, no linked list with any another page");
    ENSURE(page.ItemsCount == 0 && page.UsedBytes == 0 && page.HighestIndex == byte.MaxValue && page.FragmentedBytes == 0, "no items on page when delete this page");
    ENSURE(page.PageType == PageType.Data || page.PageType == PageType.Index, "only data/index page can be deleted");

    // debug-only checks: the collection free lists must not still reference this page,
    // and the page content (after the header) must be zeroed
    // (fixed message typos: "cann't" -> "can't", "linked o" -> "linked to", "shloud" -> "should")
    DEBUG(!_collectionPage.FreeDataPageList.Any(x => x == page.PageID), "this page can't be deleted because free data list page is linked to this page");
    DEBUG(!_collectionPage.GetCollectionIndexes().Any(x => x.FreeIndexPageList == page.PageID), "this page can't be deleted because free index list page is linked to this page");
    DEBUG(page.Buffer.Slice(PAGE_HEADER_SIZE, PAGE_SIZE - PAGE_HEADER_SIZE - 1).All(0), "page content should be empty");

    // mark page as empty and dirty
    page.MarkAsEmtpy();

    // fix this page in free-link-list
    if (_transPages.FirstDeletedPageID == uint.MaxValue)
    {
        // fixed assertion message: the condition asserts there are NO deleted pages yet
        // (previous message "must has deleted pages" contradicted the DeletedPages == 0 check)
        ENSURE(_transPages.DeletedPages == 0, "if has no firstDeletedPageID must has no deleted pages");

        // set first and last deleted page as current deleted page
        _transPages.FirstDeletedPageID = page.PageID;
        _transPages.LastDeletedPageID = page.PageID;
    }
    else
    {
        ENSURE(_transPages.DeletedPages > 0, "must have at least 1 deleted page");

        // set next link from current deleted page to first deleted page
        page.NextPageID = _transPages.FirstDeletedPageID;

        // and then, set this current deleted page as first page making a linked list
        _transPages.FirstDeletedPageID = page.PageID;
    }

    _transPages.DeletedPages++;
}
/// <summary>
/// Implement internal update document: rewrite the data block and reconcile all
/// non-PK index entries (delete keys no longer produced, insert newly produced keys).
/// Returns false when no document with this _id exists; true otherwise.
/// </summary>
private bool UpdateDocument(Snapshot snapshot, CollectionPage col, BsonDocument doc, IndexService indexer, DataService data)
{
    // normalize id before find
    var id = doc["_id"];

    // validate id for null, min/max values
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    // find indexNode from pk index
    var pkNode = indexer.Find(col.PK, id, false, LiteDB.Query.Ascending);

    // if not found document, no updates
    if (pkNode == null)
    {
        return(false);
    }

    // update data storage
    data.Update(col, pkNode.DataBlock, doc);

    // get all current non-pk index nodes from this data block (slot, key, nodePosition)
    // NOTE: walks the node chain starting at pkNode.NextNode, so the PK node itself is excluded
    var oldKeys = indexer.GetNodeList(pkNode.NextNode)
        .Select(x => new Tuple<byte, BsonValue, PageAddress>(x.Slot, x.Key, x.Position))
        .ToArray();

    // build a list of all new key index keys
    // Tuple is (index slot, key value, index name) - name is kept to resolve the index later
    var newKeys = new List<Tuple<byte, BsonValue, string>>();

    foreach (var index in col.GetCollectionIndexes().Where(x => x.Name != "_id"))
    {
        // getting all keys from expression over document
        // (a single expression may yield multiple keys, e.g. multikey/array indexes)
        var keys = index.BsonExpr.Execute(doc, _header.Pragmas.Collation);

        foreach (var key in keys)
        {
            newKeys.Add(new Tuple<byte, BsonValue, string>(index.Slot, key, index.Name));
        }
    }

    // no secondary-index entries before or after the update - nothing to reconcile
    if (oldKeys.Length == 0 && newKeys.Count == 0)
    {
        return(true);
    }

    // get a list of all nodes that are in oldKeys but not in newKeys (must delete)
    // matching is by (slot, key) pair; the stored value is the node's PageAddress
    var toDelete = new HashSet<PageAddress>(oldKeys
        .Where(x => newKeys.Any(n => n.Item1 == x.Item1 && n.Item2 == x.Item2) == false)
        .Select(x => x.Item3));

    // get a list of all keys that are not in oldKeys (must insert)
    var toInsert = newKeys
        .Where(x => oldKeys.Any(o => o.Item1 == x.Item1 && o.Item2 == x.Item2) == false)
        .ToArray();

    // if nothing to change, just exit
    if (toDelete.Count == 0 && toInsert.Length == 0)
    {
        return(true);
    }

    // delete nodes and return last keeped node in list
    // (the returned node anchors the chain so new nodes can be linked after it)
    var last = indexer.DeleteList(pkNode.Position, toDelete);

    // now, insert all new nodes
    foreach (var elem in toInsert)
    {
        // elem.Item3 is the index name captured above; resolve back to the index metadata
        var index = col.GetCollectionIndex(elem.Item3);

        last = indexer.AddNode(index, elem.Item2, pkNode.DataBlock, last);
    }

    return(true);
}
/// <summary>
/// Read every page from the given file origin and yield one diagnostic document
/// per page (header/collection pages get extra type-specific fields).
/// </summary>
private IEnumerable<BsonDocument> SysDump(FileOrigin origin)
{
    // reverse lookup: collection PageID -> collection name
    var colNames = _header.GetCollections().ToDictionary(x => x.Value, x => x.Key);

    // render uint.MaxValue page links as null instead of a huge number
    BsonValue dumpPageID(uint pageID) => pageID == uint.MaxValue ? BsonValue.Null : new BsonValue((int)pageID);

    foreach (var buffer in _disk.ReadFull(origin))
    {
        var page = new BasePage(buffer);
        var pageID = page.PageID;

        // empty "bubble" pages in the data file carry PageID == 0; recover the
        // real page id from the file position so the dump prints it correctly
        if (origin == FileOrigin.Data && buffer.Position > 0 && pageID == 0)
        {
            pageID = (uint)(buffer.Position / PAGE_SIZE);
        }

        // common header fields shared by every page type (key order preserved)
        var doc = new BsonDocument
        {
            ["_position"] = (int)buffer.Position,
            ["pageID"] = (int)pageID,
            ["pageType"] = page.PageType.ToString(),
            ["nextPageID"] = dumpPageID(page.NextPageID),
            ["prevPageID"] = dumpPageID(page.PrevPageID),
            ["collection"] = colNames.GetOrDefault(page.ColID, "-"),
            ["transactionID"] = (int)page.TransactionID,
            ["isConfirmed"] = page.IsConfirmed,
            ["itemsCount"] = (int)page.ItemsCount,
            ["freeBytes"] = page.FreeBytes,
            ["usedBytes"] = (int)page.UsedBytes,
            ["fragmentedBytes"] = (int)page.FragmentedBytes,
            ["nextFreePosition"] = (int)page.NextFreePosition,
            ["highestIndex"] = (int)page.HighestIndex
        };

        if (page.PageType == PageType.Header)
        {
            // re-read the buffer as a header page for header-only fields
            var header = new HeaderPage(buffer);

            doc["freeEmptyPageID"] = dumpPageID(header.FreeEmptyPageID);
            doc["lastPageID"] = (int)header.LastPageID;
            doc["creationTime"] = header.CreationTime;
            doc["userVersion"] = header.UserVersion;
            doc["collections"] = new BsonDocument(header.GetCollections().ToDictionary(x => x.Key, x => new BsonValue((int)x.Value)));
        }
        else if (page.PageType == PageType.Collection)
        {
            // re-read the buffer as a collection page for free lists + index metadata
            var collection = new CollectionPage(buffer);

            doc["lastAnalyzed"] = collection.LastAnalyzed;
            doc["creationTime"] = collection.CreationTime;
            doc["freeDataPageID"] = new BsonArray(collection.FreeDataPageID.Select(x => dumpPageID(x)));
            doc["freeIndexPageID"] = new BsonArray(collection.FreeIndexPageID.Select(x => dumpPageID(x)));
            doc["indexes"] = new BsonArray(collection.GetCollectionIndexes().Select(x => new BsonDocument
            {
                ["name"] = x.Name,
                ["expression"] = x.Expression,
                ["unique"] = x.Unique,
                ["headPageID"] = dumpPageID(x.Head.PageID),
                ["tailPageID"] = dumpPageID(x.Tail.PageID),
                ["maxLevel"] = (int)x.MaxLevel,
                ["keyCount"] = (int)x.KeyCount,
                ["uniqueKeyCount"] = (int)x.UniqueKeyCount
            }));
        }

        yield return doc;
    }
}
/// <summary>
/// Dump pages as seen through the current transaction snapshot: either a single
/// page (when pageID has a value, including its raw buffer) or all pages up to
/// the header's LastPageID.
/// </summary>
private IEnumerable<BsonDocument> DumpPages(uint? pageID)
{
    // reverse lookup: collection PageID -> collection name
    var colNames = _header.GetCollections().ToDictionary(x => x.Value, x => x.Key);

    // get any transaction from current thread ID
    var txn = _monitor.GetThreadTransaction();
    var snap = txn.CreateSnapshot(LockMode.Read, "$", false);

    // single-page mode dumps only the requested page; otherwise scan the whole file
    var first = pageID ?? 0;
    var final = pageID ?? _header.LastPageID;

    for (uint i = first; i <= Math.Min(final, _header.LastPageID); i++)
    {
        // read through the snapshot so WAL versions/origins are resolved
        var page = snap.GetPage<BasePage>(i, out var origin, out var position, out var walVersion);

        var doc = new BsonDocument
        {
            ["pageID"] = (int)page.PageID,
            ["pageType"] = page.PageType.ToString(),
            ["_position"] = position,
            ["_origin"] = origin.ToString(),
            ["_version"] = walVersion,
            ["prevPageID"] = (int)page.PrevPageID,
            ["nextPageID"] = (int)page.NextPageID,
            ["slot"] = (int)page.PageListSlot,
            ["collection"] = colNames.GetOrDefault(page.ColID, "-"),
            ["itemsCount"] = (int)page.ItemsCount,
            ["freeBytes"] = page.FreeBytes,
            ["usedBytes"] = (int)page.UsedBytes,
            ["fragmentedBytes"] = (int)page.FragmentedBytes,
            ["nextFreePosition"] = (int)page.NextFreePosition,
            ["highestIndex"] = (int)page.HighestIndex
        };

        if (page.PageType == PageType.Collection)
        {
            // collection pages additionally expose free-page lists and index metadata
            var colPage = new CollectionPage(page.Buffer);

            doc["dataPageList"] = new BsonArray(colPage.FreeDataPageList.Select(x => new BsonValue((int)x)));
            doc["indexes"] = new BsonArray(colPage.GetCollectionIndexes().Select(x => new BsonDocument
            {
                ["slot"] = (int)x.Slot,
                ["empty"] = x.IsEmpty,
                ["indexType"] = (int)x.IndexType,
                ["name"] = x.Name,
                ["expression"] = x.Expression,
                ["unique"] = x.Unique,
                ["head"] = x.Head.ToBsonValue(),
                ["tail"] = x.Tail.ToBsonValue(),
                ["maxLevel"] = (int)x.MaxLevel,
                ["freeIndexPageList"] = (int)x.FreeIndexPageList,
            }));
        }

        // include the raw page bytes only in single-page mode
        if (pageID.HasValue)
        {
            doc["buffer"] = page.Buffer.ToArray();
        }

        yield return doc;

        // keep the transaction healthy during long scans
        txn.Safepoint();
    }
}