/// <summary>
/// Drop a collection - remove all data pages + indexes pages,
/// then unlink the collection from the header page.
/// </summary>
/// <param name="col">Collection page describing the collection to drop.</param>
/// <param name="cache">Cache service; checkpointed periodically so the dirty-page
/// cache stays bounded during a large drop.</param>
public void Drop(CollectionPage col, CacheService cache)
{
    // add all pages to delete; the HashSet de-duplicates page IDs so each
    // page is deleted exactly once, even if referenced by several nodes
    var pages = new HashSet <uint>();

    // search for all data page and index page
    foreach (var index in col.GetIndexes(true))
    {
        // get all nodes from index
        var nodes = _indexer.FindAll(index, Query.Ascending);

        foreach (var node in nodes)
        {
            // if is PK index, add dataPages — only this pass visits data blocks,
            // so data/extend pages are handled once instead of once per index
            if (index.Slot == 0)
            {
                pages.Add(node.DataBlock.PageID);

                // read datablock to check if there is any extended page
                var block = _data.Read(node.DataBlock, false);

                if (block.ExtendPageID != uint.MaxValue)
                {
                    // delete the extend page immediately; second argument 'true'
                    // presumably deletes the whole linked sequence — confirm
                    // against the pager's DeletePage contract
                    _pager.DeletePage(block.ExtendPageID, true);
                    cache.CheckPoint();
                }
            }

            // add index page to delete list page
            pages.Add(node.Position.PageID);
        }

        // remove head+tail nodes in all indexes
        pages.Add(index.HeadNode.PageID);
        pages.Add(index.TailNode.PageID);
    }

    // and now, lets delete all this pages (collected first, deleted after the
    // scan so we never delete an index page while still iterating it)
    foreach (var pageID in pages)
    {
        _pager.DeletePage(pageID);
        cache.CheckPoint();
    }

    // get header page to remove from collection list links
    var header = _pager.GetPage <HeaderPage>(0, true);

    header.CollectionPages.Remove(col.CollectionName);

    // finally, release the collection page itself
    _pager.DeletePage(col.PageID);
}
/// <summary>
/// Append a one-line human-readable description of a collection page to the
/// given builder: name, document count, free-data-page link, then one entry
/// per index (slot, field, head node, free-index-page link).
/// </summary>
/// <param name="page">Collection page to describe.</param>
/// <param name="sb">Builder that receives the formatted text.</param>
public static void Dump(this CollectionPage page, StringBuilder sb)
{
    // collection header: name, count and free-data-page pointer
    sb.AppendFormat("'{0}', Count: {1}, FreeDataPageID: {2}, Indexes = ",
        page.CollectionName,
        page.DocumentCount,
        page.FreeDataPageID.Dump());

    // one segment per index (PK included), separated by " / "
    foreach (var index in page.GetIndexes(true))
    {
        sb.AppendFormat("[{0}] Field: '{1}', Head: {2}, FreeIndexPageID: {3} / ",
            index.Slot,
            index.Field,
            index.HeadNode.Dump(),
            index.FreeIndexPageID.Dump());
    }
}
/// <summary>
/// Gather storage-usage statistics for a collection by walking the PK index
/// (which reaches every data/extend page) and then each secondary index.
/// </summary>
/// <param name="col">Collection to measure.</param>
/// <param name="indexPages">Number of distinct index pages across all indexes.</param>
/// <param name="indexFree">Total free bytes on those index pages.</param>
/// <param name="dataPages">Number of distinct data pages.</param>
/// <param name="extendPages">Number of extend (overflow) pages.</param>
/// <param name="dataFree">Total free bytes on data + extend pages.</param>
/// <param name="docSize">Total serialized size, in bytes, of all documents.</param>
private void Usage(CollectionPage col, out int indexPages, out int indexFree, out int dataPages, out int extendPages, out int dataFree, out int docSize)
{
    // visited page IDs — page IDs are unique database-wide, so one set serves
    // both index and data pages. HashSet<T>.Add returns false on duplicates,
    // giving a single hash lookup instead of Contains() + Add().
    var pages = new HashSet<uint>();

    indexPages = indexFree = dataPages = extendPages = dataFree = docSize = 0;

    // get all pages from PK index + data/extend pages
    foreach (var node in _indexer.FindAll(col.PK, Query.Ascending))
    {
        // skip index pages already counted via an earlier node
        if (!pages.Add(node.Position.PageID)) { continue; }

        indexPages++;
        indexFree += node.Page.FreeBytes;

        // follow every non-empty data block referenced from this index page
        foreach (var n in node.Page.Nodes.Values.Where(x => !x.DataBlock.IsEmpty))
        {
            var dataPage = _pager.GetPage<DataPage>(n.DataBlock.PageID, false);

            // count each data page once (Add is the membership test)
            if (!pages.Add(dataPage.PageID)) { continue; }

            // deserialize each document to measure its real serialized size
            foreach (var block in dataPage.DataBlocks.Values)
            {
                var doc = BsonSerializer.Deserialize(_data.Read(block.Position, true).Buffer);
                docSize += doc.GetBytesCount(true);
            }

            dataPages++;
            dataFree += dataPage.FreeBytes;

            // getting extended pages — overflow chains for oversized documents
            foreach (var ex in dataPage.DataBlocks.Values.Where(x => x.ExtendPageID != uint.MaxValue))
            {
                foreach (var extendPage in _pager.GetSeqPages<ExtendPage>(ex.ExtendPageID))
                {
                    extendPages++;
                    dataFree += extendPage.FreeBytes;
                }
            }
        }

        // checkpoint keeps the page cache bounded during the scan
        _cache.CheckPoint();
    }

    // add all others indexes (GetIndexes(false) — presumably excludes the PK,
    // which was already walked above; confirm against CollectionPage)
    foreach (var index in col.GetIndexes(false))
    {
        foreach (var node in _indexer.FindAll(index, Query.Ascending))
        {
            if (!pages.Add(node.Position.PageID)) { continue; }

            indexPages++;
            indexFree += node.Page.FreeBytes;

            _cache.CheckPoint();
        }
    }
}