/// <summary>
/// Delete one dataBlock
/// </summary>
public DataBlock Delete(CollectionPage col, PageAddress blockAddress)
{
    // load the data page (marking it dirty) and locate the block being removed
    var dataPage = _pager.GetPage<DataPage>(blockAddress.PageID, true);
    var dataBlock = dataPage.DataBlocks[blockAddress.Index];

    // collection page counters/free-list will change, so mark it dirty up front
    _pager.SetDirty(col);

    // if the block owns an extended page chain, delete the whole chain
    if (dataBlock.ExtendPageID != uint.MaxValue)
    {
        _pager.DeletePage(dataBlock.ExtendPageID, true);
    }

    // remove the block from its page and refresh free bytes + item count
    dataPage.DataBlocks.Remove(dataBlock.Position.Index);
    dataPage.UpdateItemCount();

    if (dataPage.DataBlocks.Count == 0)
    {
        // page became empty: detach it from the free list, then delete it
        _pager.AddOrRemoveToFreeList(false, dataPage, col, ref col.FreeDataPageID);

        _pager.DeletePage(dataPage.PageID);
    }
    else
    {
        // keep the page on the free list only while it still has usable space
        _pager.AddOrRemoveToFreeList(dataPage.FreeBytes > DataPage.DATA_RESERVED_BYTES, dataPage, col, ref col.FreeDataPageID);
    }

    col.DocumentCount--;

    return dataBlock;
}
/// <summary>
/// Create a new index and returns head page address (skip list)
/// </summary>
public CollectionIndex CreateIndex(CollectionPage col)
{
    // reserve a free index slot on the collection page
    var index = col.GetFreeIndex();

    // allocate the first page for this index
    var indexPage = _pager.NewPage<IndexPage>();

    // build the head node at the maximum skip-list level, keyed at MinValue
    var headNode = new IndexNode(IndexNode.MAX_LEVEL_LENGTH)
    {
        Key = BsonValue.MinValue,
        KeyLength = (ushort)BsonValue.MinValue.GetBytesCount(false),
        Page = indexPage,
        Position = new PageAddress(indexPage.PageID, 0)
    };

    // register head as the page's first node and refresh free bytes + item count
    indexPage.Nodes.Add(headNode.Position.Index, headNode);
    indexPage.UpdateItemCount();

    _pager.SetDirty(index.Page);

    // freshly created page has room left, so link it into the index free list
    _pager.AddOrRemoveToFreeList(true, indexPage, index.Page, ref index.FreeIndexPageID);

    // point the head node reference at the node just created
    index.HeadNode = headNode.Position;

    // insert the tail node (MaxValue key) that closes the skip list
    var tailNode = this.AddNode(index, BsonValue.MaxValue, IndexNode.MAX_LEVEL_LENGTH);
    index.TailNode = tailNode.Position;

    return index;
}
/// <summary>
/// Walk every page reachable from a collection (PK index, data pages, extend
/// pages, secondary indexes) and accumulate usage statistics via out params.
/// </summary>
/// <param name="col">Collection page to analyze.</param>
/// <param name="indexPages">Count of distinct index pages visited.</param>
/// <param name="indexFree">Sum of free bytes across index pages.</param>
/// <param name="dataPages">Count of distinct data pages visited.</param>
/// <param name="extendPages">Count of extend pages visited.</param>
/// <param name="dataFree">Sum of free bytes across data + extend pages.</param>
/// <param name="docSize">Total serialized size of all documents.</param>
private void Usage(CollectionPage col, out int indexPages, out int indexFree, out int dataPages, out int extendPages, out int dataFree, out int docSize)
{
    // pages already counted — shared across index and data traversal to avoid double counting
    var pages = new HashSet<uint>();

    indexPages = indexFree = dataPages = extendPages = dataFree = docSize = 0;

    // get all pages from PK index + data/extend pages
    foreach (var node in _indexer.FindAll(col.PK, Query.Ascending))
    {
        // HashSet<T>.Add returns false when the id was already present —
        // single lookup instead of the Contains-then-Add double lookup
        if (!pages.Add(node.Position.PageID))
        {
            continue;
        }

        indexPages++;
        indexFree += node.Page.FreeBytes;

        foreach (var n in node.Page.Nodes.Values.Where(x => !x.DataBlock.IsEmpty))
        {
            var dataPage = _pager.GetPage<DataPage>(n.DataBlock.PageID, false);

            if (!pages.Add(dataPage.PageID))
            {
                continue;
            }

            // deserialize each document to measure its serialized byte size
            foreach (var block in dataPage.DataBlocks.Values)
            {
                var doc = BsonSerializer.Deserialize(_data.Read(block.Position, true).Buffer);
                docSize += doc.GetBytesCount(true);
            }

            dataPages++;
            dataFree += dataPage.FreeBytes;

            // getting extended pages (blocks whose data spilled past one page)
            foreach (var ex in dataPage.DataBlocks.Values.Where(x => x.ExtendPageID != uint.MaxValue))
            {
                foreach (var extendPage in _pager.GetSeqPages<ExtendPage>(ex.ExtendPageID))
                {
                    extendPages++;
                    dataFree += extendPage.FreeBytes;
                }
            }
        }

        // release cache pressure between index pages during this long scan
        _cache.CheckPoint();
    }

    // add all others indexes
    foreach (var index in col.GetIndexes(false))
    {
        foreach (var node in _indexer.FindAll(index, Query.Ascending))
        {
            if (!pages.Add(node.Position.PageID))
            {
                continue;
            }

            indexPages++;
            indexFree += node.Page.FreeBytes;

            _cache.CheckPoint();
        }
    }
}