/// <summary>
/// Moves the iterator to the previous entry in key order. Walks backwards in
/// the current page; when the position points at a branch entry, descends to
/// the rightmost leaf entry underneath it (prefetching and decompressing
/// pages as needed), and pops the cursor stack when a page is exhausted.
/// Returns false once positioned before the first entry of the tree.
/// </summary>
public bool MovePrev()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }
    while (true)
    {
        _currentPage.LastSearchPosition--;
        if (_currentPage.LastSearchPosition >= 0)
        {
            // There is still an entry in this page; if it is a branch,
            // descend to the rightmost leaf entry under it.
            while (_currentPage.IsBranch)
            {
                // In here we will also have the 'current' page (even if we are traversing a compressed node).
                if (_prefetch)
                {
                    MaybePrefetchPagesReferencedBy(_currentPage);
                }
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tree.GetReadOnlyTreePage(node->PageNumber);
                if (_currentPage.IsCompressed)
                {
                    DecompressedCurrentPage();
                }
                // Start from the last entry of the child when walking backwards.
                _currentPage.LastSearchPosition = _currentPage.NumberOfEntries - 1;
            }
            // We should be prefetching data pages down here.
            if (_prefetch)
            {
                MaybePrefetchPagesReferencedBy(_currentPage);
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (DoRequireValidation && this.ValidateCurrentKey(_tx, current) == false)
            {
                return (false);
            }
            // Release the previous key's allocation before taking the new one.
            _prevKeyScope.Dispose();
            _prevKeyScope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, current, out _currentInternalKey);
            _currentKey = _currentInternalKey;
            return (true); // there is another entry in this page
        }
        // Ran out of entries on this page; go back up to the parent.
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    // Reached the position before the first entry of the tree.
    _currentPage = null;
    return (false);
}
/// <summary>
/// Performs the page split: allocates the right page, ensures a parent page
/// exists (splitting the root when the cursor stack is empty), decompresses
/// the page being split when needed, and either moves only the new value to
/// the right page (sequential-insert optimization, split point at the very
/// end of the page) or splits the page in half. Returns a pointer to where
/// the new value should be written.
/// </summary>
public byte *Execute()
{
    using (DisableFreeSpaceUsageIfSplittingRootTree())
    {
        TreePage rightPage = _tree.NewPage(_page.TreeFlags, _page.PageNumber);
        if (_cursor.PageCount == 0) // we need to do a root split
        {
            TreePage newRootPage = _tree.NewPage(TreePageFlags.Branch, _page.PageNumber);
            _cursor.Push(newRootPage);
            _tree.State.RootPageNumber = newRootPage.PageNumber;
            _tree.State.Depth++;
            // now add implicit left page
            newRootPage.AddPageRefNode(0, Slices.BeforeAllKeys, _page.PageNumber);
            _parentPage = newRootPage;
            _parentPage.LastSearchPosition++;
        }
        else
        {
            // we already popped the page, so the current one on the stack is the parent of the page
            _parentPage = _tree.ModifyPage(_cursor.CurrentPage);
            _cursor.Update(_cursor.Pages, _parentPage);
        }
        if (_page.IsLeaf)
        {
            _tree.ClearPagesCache();
        }
        if (_page.IsCompressed)
        {
            // The split operates on the decompressed copy of the page.
            _pageDecompressed = _tree.DecompressPage(_page);
            _pageDecompressed.Search(_tx, _newKey);
            if (_pageDecompressed.LastMatch == 0)
            {
                // we are going to insert the value in a bit, but it might have
                // been in the compressed portion and not removed by the calling
                // code
                _tree.RemoveLeafNode(_pageDecompressed);
            }
            _page = _pageDecompressed;
        }
        using (_pageDecompressed)
        {
            if (_page.LastSearchPosition >= _page.NumberOfEntries)
            {
                // Insertion point is at the very end of the page: treat it as a
                // sequential-insert pattern and move only the new value right.
                var pos = OptimizedOnlyMoveNewValueToTheRightPage(rightPage);
                RecompressPageIfNeeded(wasModified: false);
                return (pos);
            }
            return (SplitPageInHalf(rightPage));
        }
    }
}
/// <summary>
/// Collapses the root when it is a branch page holding exactly one page
/// reference: the sole child becomes the new root, the old root page is
/// freed, and the tree depth shrinks by one. Any other root shape is left
/// untouched.
/// </summary>
private void RebalanceRoot(TreePage page)
{
    // Nothing to collapse for an empty root, a leaf root, or a branch
    // that still has more than a single child.
    if (page.NumberOfEntries == 0 || page.IsBranch == false || page.NumberOfEntries > 1)
        return;

    // The root holds exactly one pointer - swap the child in as the new root.
    var onlyChildRef = page.GetNode(0);
    Debug.Assert(onlyChildRef->Flags == (TreeNodeFlags.PageRef));

    var newRoot = _tree.ModifyPage(onlyChildRef->PageNumber);
    _tree.State.RootPageNumber = newRoot.PageNumber;
    _tree.State.Depth--;
    Debug.Assert(newRoot.Dirty);

    // Replace the old root with the new one at the top of the cursor stack.
    _cursor.Pop();
    _cursor.Push(newRoot);

    _tree.FreePage(page);
}
/// <summary>
/// Adds a separator key pointing at <paramref name="pageRefNumber"/> into the
/// parent page. When the parent has no room it is split recursively, after
/// which the cursor is fixed up so it still references the actual parent of
/// the page being split. Returns a pointer to the position where the page
/// ref node was written.
/// </summary>
public byte *AddSeparator(Slice separator, long pageRefNumber, int?nodePos = null)
{
    var originalLastSearchPositionOfParent = _parentPage.LastSearchPosition;
    if (nodePos == null)
    {
        nodePos = _parentPage.NodePositionFor(_tx, separator); // select the appropriate place for this
    }
    if (_parentPage.HasSpaceFor(_tx, TreeSizeOf.BranchEntry(separator) + Constants.NodeOffsetSize) == false)
    {
        // Parent is full - split it to make room for the new page ref.
        var pageSplitter = new TreePageSplitter(_tx, _tree, separator, -1, pageRefNumber, TreeNodeFlags.PageRef, 0, _cursor);
        var posToInsert = pageSplitter.Execute();
        ParentOfAddedPageRef = _cursor.CurrentPage;
        // After the split, the cursor's current page may or may not still be
        // the parent of _currentPage; scan its entries to find out.
        var adjustParentPageOnCursor = true;
        for (int i = 0; i < _cursor.CurrentPage.NumberOfEntries; i++)
        {
            if (_cursor.CurrentPage.GetNode(i)->PageNumber == _currentPage.PageNumber)
            {
                adjustParentPageOnCursor = false;
                _cursor.CurrentPage.LastSearchPosition = i;
                break;
            }
        }
        if (adjustParentPageOnCursor)
        {
            // the above page split has modified the cursor that its first page points to the parent of the leaf where 'separatorKey' was inserted
            // and it doesn't have the reference to _page, we need to ensure that the actual parent is first at the cursor
            _cursor.Pop();
            _cursor.Push(_parentPage);
            EnsureValidLastSearchPosition(_parentPage, _currentPage.PageNumber, originalLastSearchPositionOfParent);
        }
#if VALIDATE
        Debug.Assert(_cursor.CurrentPage.GetNode(_cursor.CurrentPage.LastSearchPosition)->PageNumber == _currentPage.PageNumber,
            "The parent page is not referencing a page which is being split");
        var parentToValidate = ParentOfAddedPageRef;
        Debug.Assert(Enumerable.Range(0, parentToValidate.NumberOfEntries).Any(i => parentToValidate.GetNode(i)->PageNumber == pageRefNumber),
            "The parent page of a page reference isn't referencing it");
#endif
        return (posToInsert);
    }
    // Fast path: parent has room, add the page ref node directly.
    ParentOfAddedPageRef = _parentPage;
    var pos = _parentPage.AddPageRefNode(nodePos.Value, separator, pageRefNumber);
    EnsureValidLastSearchPosition(_parentPage, _currentPage.PageNumber, originalLastSearchPositionOfParent);
    return (pos);
}
/// <summary>
/// Moves the iterator to the previous entry in key order. Walks backwards in
/// the current page; when the position points at a branch entry, descends to
/// the rightmost leaf entry underneath it, and pops the cursor stack when a
/// page is exhausted. Returns false once positioned before the first entry.
/// </summary>
public bool MovePrev()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }
    while (true)
    {
        _currentPage.LastSearchPosition--;
        if (_currentPage.LastSearchPosition >= 0)
        {
            // There is still an entry in this page; if it is a branch,
            // descend to the rightmost leaf entry under it.
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tree.GetReadOnlyTreePage(node->PageNumber);
                // Start from the last entry of the child when walking backwards.
                _currentPage.LastSearchPosition = _currentPage.NumberOfEntries - 1;
                if (_prefetch && _currentPage.IsLeaf)
                {
                    MaybePrefetchOverflowPages(_currentPage);
                }
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (DoRequireValidation && this.ValidateCurrentKey(_tx, current) == false)
            {
                return (false);
            }
            // Release the previous key's allocation before taking the new one.
            _prevKeyScope.Dispose();
            _prevKeyScope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, current, out _currentInternalKey);
            _currentKey = _currentInternalKey;
            return (true); // there is another entry in this page
        }
        // Ran out of entries on this page; go back up to the parent.
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    // Reached the position before the first entry of the tree.
    _currentPage = null;
    return (false);
}
/// <summary>
/// Builds a cursor for the recorded path of page numbers leading to the
/// page found last. The page matching <c>_lastFoundPageNumber</c> is
/// represented by the stored copy; every other page on the path is re-read
/// and positioned according to the key options. Returns the cached cursor
/// when one was already built.
/// </summary>
/// <param name="key">The key (or Before/AfterAllKeys marker) used to position each page.</param>
/// <returns>A cursor whose stack mirrors the recorded page path.</returns>
public TreeCursor Build(Slice key)
{
    if (_current != null)
    {
        return (_current);
    }
    var c = new TreeCursor();
    foreach (var p in _cursorPath)
    {
        if (p == _lastFoundPageNumber)
        {
            c.Push(_pageCopy);
        }
        else
        {
            var cursorPage = _tree.GetReadOnlyTreePage(p);
            if (key.Options == SliceOptions.Key)
            {
                // Guard against a null result from Search before reading
                // LastMatch, matching the sibling cursor builders in this
                // file; only step back when a real, non-exact match occurred.
                if (cursorPage.Search(_llt, key) != null && cursorPage.LastMatch != 0)
                {
                    cursorPage.LastSearchPosition--;
                }
            }
            else if (key.Options == SliceOptions.BeforeAllKeys)
            {
                cursorPage.LastSearchPosition = 0;
            }
            else if (key.Options == SliceOptions.AfterAllKeys)
            {
                cursorPage.LastSearchPosition = (ushort)(cursorPage.NumberOfEntries - 1);
            }
            else
            {
                throw new ArgumentException("Unknown SliceOptions: " + key.Options);
            }
            c.Push(cursorPage);
        }
    }
    return (c);
}
/// <summary>
/// Reconstructs a cursor from a previously recorded path of page numbers.
/// The page that was found last is represented by the supplied copy; every
/// other page on the path is re-read and positioned according to the key
/// options before being pushed onto the cursor.
/// </summary>
private TreeCursor BuildTreeCursor(Slice key, long[] cursorPath, long lastFoundPageNumber, TreePage pageCopy)
{
    var cursor = new TreeCursor();

    foreach (var pageNumber in cursorPath)
    {
        if (pageNumber == lastFoundPageNumber)
        {
            // Use the caller-provided copy for the page we actually found.
            cursor.Push(pageCopy);
            continue;
        }

        var pageOnPath = GetReadOnlyTreePage(pageNumber);

        switch (key.Options)
        {
            case SliceOptions.Key:
                // Position on the key; when it wasn't an exact match, step
                // back one entry so we point at the containing child.
                if (pageOnPath.Search(_llt, key) != null && pageOnPath.LastMatch != 0)
                    pageOnPath.LastSearchPosition--;
                break;
            case SliceOptions.BeforeAllKeys:
                pageOnPath.LastSearchPosition = 0;
                break;
            case SliceOptions.AfterAllKeys:
                pageOnPath.LastSearchPosition = (ushort)(pageOnPath.NumberOfEntries - 1);
                break;
            default:
                throw new ArgumentException();
        }

        cursor.Push(pageOnPath);
    }

    return cursor;
}
/// <summary>
/// Performs the page split: allocates the right page, ensures a parent page
/// exists (splitting the root when the cursor stack is empty), and either
/// applies the sequential-insert optimization (split point at the very end
/// of the page) or splits the page in half. Returns a pointer to where the
/// new value should be written.
/// </summary>
public byte *Execute()
{
    using (DisableFreeSpaceUsageIfSplittingRootTree())
    {
        TreePage rightPage = _tree.NewPage(_page.TreeFlags, 1);
        if (_cursor.PageCount == 0) // we need to do a root split
        {
            TreePage newRootPage = _tree.NewPage(TreePageFlags.Branch, 1);
            _cursor.Push(newRootPage);
            _tree.State.RootPageNumber = newRootPage.PageNumber;
            _tree.State.Depth++;
            // now add implicit left page
            newRootPage.AddPageRefNode(0, Slices.BeforeAllKeys, _page.PageNumber);
            _parentPage = newRootPage;
            _parentPage.LastSearchPosition++;
        }
        else
        {
            // we already popped the page, so the current one on the stack is the parent of the page
            _parentPage = _tree.ModifyPage(_cursor.CurrentPage);
            _cursor.Update(_cursor.Pages.First, _parentPage);
        }
        if (_page.IsLeaf)
        {
            _tree.ClearPagesCache();
        }
        if (_page.IsCompressed)
        {
            // The split operates on the decompressed copy of the page.
            _pageDecompressed = _tree.DecompressPage(_page);
            _pageDecompressed.Search(_tx, _newKey);
            _page = _pageDecompressed;
        }
        using (_pageDecompressed)
        {
            if (_page.LastSearchPosition >= _page.NumberOfEntries)
            {
                // when we get a split at the end of the page, we take that as a hint that the user is doing
                // sequential inserts, at that point, we are going to keep the current page as is and create a new
                // page, this will allow us to do minimal amount of work to get the best density
                TreePage branchOfSeparator;
                byte *pos;
                if (_page.IsBranch)
                {
                    if (_page.NumberOfEntries > 2)
                    {
                        // here we steal the last entry from the current page so we maintain the implicit null left entry
                        TreeNodeHeader *node = _page.GetNode(_page.NumberOfEntries - 1);
                        Debug.Assert(node->Flags == TreeNodeFlags.PageRef);
                        rightPage.AddPageRefNode(0, Slices.BeforeAllKeys, node->PageNumber);
                        pos = AddNodeToPage(rightPage, 1);
                        Slice separatorKey;
                        using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out separatorKey))
                        {
                            AddSeparatorToParentPage(rightPage.PageNumber, separatorKey, out branchOfSeparator);
                        }
                        _page.RemoveNode(_page.NumberOfEntries - 1);
                    }
                    else
                    {
                        _tree.FreePage(rightPage); // return the unnecessary right page
                        // Branch too small to steal from: just add the separator
                        // to the parent and keep the current page as is.
                        pos = AddSeparatorToParentPage(_pageNumber, _newKey, out branchOfSeparator);
                        if (_cursor.CurrentPage.PageNumber != branchOfSeparator.PageNumber)
                        {
                            _cursor.Push(branchOfSeparator);
                        }
                        return (pos);
                    }
                }
                else
                {
                    // Leaf page: the new value alone goes to the right page.
                    AddSeparatorToParentPage(rightPage.PageNumber, _newKey, out branchOfSeparator);
                    pos = AddNodeToPage(rightPage, 0);
                }
                _cursor.Push(rightPage);
                return (pos);
            }
            return (SplitPageInHalf(rightPage));
        }
    }
}
/// <summary>
/// Tries to serve a lookup from the cache of recently found leaf pages.
/// On a hit, outputs the (possibly copied) leaf page, the node located by
/// searching it, and a factory that lazily rebuilds the cursor path down to
/// that page. Returns false on a cache miss.
/// </summary>
private bool TryUseRecentTransactionPage(Slice key, out Func <TreeCursor> cursor, out TreePage page, out TreeNodeHeader *node)
{
    node = null;
    page = null;
    cursor = null;
    var recentPages = _recentlyFoundPages;
    if (recentPages == null)
    {
        return (false);
    }
    var foundPage = recentPages.Find(key);
    if (foundPage == null)
    {
        return (false);
    }
    var lastFoundPageNumber = foundPage.Number;
    if (foundPage.Page != null)
    {
        // we can't share the same instance, Page instance may be modified by
        // concurrently run iterators
        page = new TreePage(foundPage.Page.Base, foundPage.Page.Source, foundPage.Page.PageSize);
    }
    else
    {
        page = _llt.GetReadOnlyTreePage(lastFoundPageNumber);
    }
    if (page.IsLeaf == false)
    {
        throw new InvalidDataException("Index points to a non leaf page");
    }
    node = page.Search(_llt, key); // will set the LastSearchPosition
    // Capture locals for the lazily-built cursor closure below.
    var cursorPath = foundPage.CursorPath;
    var pageCopy = page;
    cursor = () =>
    {
        var c = new TreeCursor();
        foreach (var p in cursorPath)
        {
            if (p == lastFoundPageNumber)
            {
                c.Push(pageCopy);
            }
            else
            {
                var cursorPage = _llt.GetReadOnlyTreePage(p);
                if (key.Options == SliceOptions.Key)
                {
                    // Step back one entry when the key was not an exact match.
                    if (cursorPage.Search(_llt, key) != null && cursorPage.LastMatch != 0)
                    {
                        cursorPage.LastSearchPosition--;
                    }
                }
                else if (key.Options == SliceOptions.BeforeAllKeys)
                {
                    cursorPage.LastSearchPosition = 0;
                }
                else if (key.Options == SliceOptions.AfterAllKeys)
                {
                    cursorPage.LastSearchPosition = (ushort)(cursorPage.NumberOfEntries - 1);
                }
                else
                {
                    throw new ArgumentException();
                }
                c.Push(cursorPage);
            }
        }
        return (c);
    };
    return (true);
}
/// <summary>
/// Descends from the root to the leaf page that should contain
/// <paramref name="key"/>, recording the path in a cursor. Tracks whether
/// the descent stayed on the leftmost/rightmost edge (used by the
/// recently-found-pages cache), outputs the search-result node and a
/// cursor constructor, and returns the leaf page.
/// </summary>
private TreePage SearchForPage(Slice key, out Func <TreeCursor> cursorConstructor, out TreeNodeHeader *node)
{
    var p = _llt.GetReadOnlyTreePage(State.RootPageNumber);
    var cursor = new TreeCursor();
    cursor.Push(p);
    bool rightmostPage = true;
    bool leftmostPage = true;
    while ((p.TreeFlags & TreePageFlags.Branch) == TreePageFlags.Branch)
    {
        int nodePos;
        if (key.Options == SliceOptions.BeforeAllKeys)
        {
            p.LastSearchPosition = nodePos = 0;
            rightmostPage = false;
        }
        else if (key.Options == SliceOptions.AfterAllKeys)
        {
            p.LastSearchPosition = nodePos = (ushort)(p.NumberOfEntries - 1);
            leftmostPage = false;
        }
        else
        {
            if (p.Search(_llt, key) != null)
            {
                nodePos = p.LastSearchPosition;
                if (p.LastMatch != 0)
                {
                    // Not an exact match - the child to follow is the one
                    // before the found position.
                    nodePos--;
                    p.LastSearchPosition--;
                }
                if (nodePos != 0)
                {
                    leftmostPage = false;
                }
                rightmostPage = false;
            }
            else
            {
                // Key is past all entries in this branch; follow the last child.
                nodePos = (ushort)(p.LastSearchPosition - 1);
                leftmostPage = false;
            }
        }
        var pageNode = p.GetNode(nodePos);
        p = _llt.GetReadOnlyTreePage(pageNode->PageNumber);
        Debug.Assert(pageNode->PageNumber == p.PageNumber, string.Format("Requested Page: #{0}. Got Page: #{1}", pageNode->PageNumber, p.PageNumber));
        cursor.Push(p);
    }
    cursorConstructor = () => cursor;
    if (p.IsLeaf == false)
    {
        throw new InvalidDataException("Index points to a non leaf page");
    }
    node = p.Search(_llt, key); // will set the LastSearchPosition
    AddToRecentlyFoundPages(cursor, p, leftmostPage, rightmostPage);
    return (p);
}
/// <summary>
/// Descends from the root to the leaf page that should contain
/// <paramref name="key"/>, recording the path in a cursor. Tracks whether
/// the descent stayed on the leftmost/rightmost edge, optionally rejects
/// compressed leaves, outputs the search-result node and a cursor
/// constructor, and returns the leaf page.
/// </summary>
private TreePage SearchForPage(Slice key, bool allowCompressed, out Func <Slice, TreeCursor> cursorConstructor, out TreeNodeHeader *node, bool addToRecentlyFoundPages = true)
{
    var p = GetReadOnlyTreePage(State.RootPageNumber);
    var cursor = new TreeCursor();
    cursor.Push(p);
    bool rightmostPage = true;
    bool leftmostPage = true;
    while ((p.TreeFlags & TreePageFlags.Branch) == TreePageFlags.Branch)
    {
        int nodePos;
        if (key.Options == SliceOptions.BeforeAllKeys)
        {
            p.LastSearchPosition = nodePos = 0;
            rightmostPage = false;
        }
        else if (key.Options == SliceOptions.AfterAllKeys)
        {
            p.LastSearchPosition = nodePos = (ushort)(p.NumberOfEntries - 1);
            leftmostPage = false;
        }
        else
        {
            if (p.Search(_llt, key) != null)
            {
                nodePos = p.LastSearchPosition;
                if (p.LastMatch != 0)
                {
                    // Not an exact match - the child to follow is the one
                    // before the found position.
                    nodePos--;
                    p.LastSearchPosition--;
                }
                if (nodePos != 0)
                {
                    leftmostPage = false;
                }
                rightmostPage = false;
            }
            else
            {
                // Key is past all entries in this branch; follow the last child.
                nodePos = (ushort)(p.LastSearchPosition - 1);
                leftmostPage = false;
            }
        }
        var pageNode = p.GetNode(nodePos);
        p = GetReadOnlyTreePage(pageNode->PageNumber);
        Debug.Assert(pageNode->PageNumber == p.PageNumber, string.Format("Requested Page: #{0}. Got Page: #{1}", pageNode->PageNumber, p.PageNumber));
        cursor.Push(p);
    }
    cursorConstructor = _ => cursor;
    if (p.IsLeaf == false)
    {
        VoronUnrecoverableErrorException.Raise(_llt.Environment, "Index points to a non leaf page");
    }
    if (allowCompressed == false && p.IsCompressed)
    {
        ThrowOnCompressedPage(p);
    }
    node = p.Search(_llt, key); // will set the LastSearchPosition
    if (p.NumberOfEntries > 0 && addToRecentlyFoundPages) // compressed page can have no ordinary entries
    {
        AddToRecentlyFoundPages(cursor, p, leftmostPage, rightmostPage);
    }
    return (p);
}