/// <summary>
/// Moves the iterator to the previous entry in key order.
/// Returns false when iteration is exhausted or the current key fails validation.
/// </summary>
public bool MovePrev()
{
    while (true)
    {
        _currentPage.LastSearchPosition--;
        if (_currentPage.LastSearchPosition >= 0)
        {
            // there is a previous entry on this page; if it is a branch,
            // descend to the right-most leaf entry underneath it
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tx.GetReadOnlyPage(node->PageNumber);
                _currentPage.LastSearchPosition = _currentPage.NumberOfEntries - 1;
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (this.ValidateCurrentKey(current, _currentPage) == false)
            {
                return(false);
            }
            _currentPage.SetNodeKey(current, ref _currentInternalKey);
            _currentKey = _currentInternalKey.ToSlice();
            return(true); // there is another entry in this page
        }
        // ran out of entries on this page, pop back to the parent page
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return(false);
}
/// <summary>
/// Splits the current page (_page) as part of an add. Allocates the right sibling
/// (and a new root when splitting the root). When the insert position is at the very
/// end of the page, this is treated as a sequential-insert hint: the current page is
/// kept as-is and the new entry goes into the fresh right page; otherwise the page
/// is split in half. Returns the data pointer for the newly added node.
/// </summary>
public byte *Execute()
{
    var rightPage = Tree.NewPage(_tx, _page.Flags, 1);
    _txInfo.RecordNewPage(_page, 1);
    rightPage.Flags = _page.Flags;
    if (_cursor.PageCount == 0) // we need to do a root split
    {
        var newRootPage = Tree.NewPage(_tx, PageFlags.Branch, 1);
        _cursor.Push(newRootPage);
        _txInfo.RootPageNumber = newRootPage.PageNumber;
        _txInfo.State.Depth++;
        _txInfo.RecordNewPage(newRootPage, 1);
        // now add implicit left page
        newRootPage.AddNode(0, Slice.BeforeAllKeys, -1, _page.PageNumber);
        _parentPage = newRootPage;
        _parentPage.LastSearchPosition++;
        _parentPage.ItemCount = _page.ItemCount;
    }
    else
    {
        // we already popped the page, so the current one on the stack is what the parent of the page
        _parentPage = _cursor.CurrentPage;
    }
    if (_page.LastSearchPosition >= _page.NumberOfEntries)
    {
        // when we get a split at the end of the page, we take that as a hint that the user is doing
        // sequential inserts, at that point, we are going to keep the current page as is and create a new
        // page, this will allow us to do minimal amount of work to get the best density
        byte *pos;
        if (_page.IsBranch)
        {
            // here we steal the last entry from the current page so we maintain the implicit null left entry
            var node = _page.GetNode(_page.NumberOfEntries - 1);
            Debug.Assert(node->Flags == NodeFlags.PageRef);
            var itemsMoved = _tx.Pager.Get(_tx, node->PageNumber).ItemCount;
            rightPage.AddNode(0, Slice.Empty, -1, node->PageNumber);
            pos = rightPage.AddNode(1, _newKey, _len, _pageNumber);
            rightPage.ItemCount = itemsMoved;
            AddSeperatorToParentPage(rightPage, new Slice(node));
            _page.RemoveNode(_page.NumberOfEntries - 1);
            _page.ItemCount -= itemsMoved;
        }
        else
        {
            AddSeperatorToParentPage(rightPage, _newKey);
            pos = rightPage.AddNode(0, _newKey, _len, _pageNumber);
        }
        _cursor.Push(rightPage);
        IncrementItemCountIfNecessary();
        return(pos);
    }
    // not an end-of-page insert: distribute the existing entries between the pages
    return(SplitPageInHalf(rightPage));
}
/// <summary>
/// Picks the sibling page to move entries from (or merge with) during rebalancing,
/// and primes LastSearchPosition on the page, the sibling and the parent accordingly.
/// Takes the right sibling when the page is the parent's left-most child, otherwise
/// the left sibling.
/// </summary>
private Page SetupMoveOrMerge(Page page, Page parentPage)
{
    Page sibling;
    if (parentPage.LastSearchPosition == 0) // we are the left most item
    {
        sibling = _tx.ModifyPage(parentPage.GetNode(1)->PageNumber, _tree, null);
        sibling.LastSearchPosition = 0;
        page.LastSearchPosition = page.NumberOfEntries;
        parentPage.LastSearchPosition = 1;
    }
    else // there is at least 1 page to our left
    {
        // LastSearchPosition may point one past the last entry; compensate while
        // looking up the left sibling, then restore the original position afterwards
        var beyondLast = parentPage.LastSearchPosition == parentPage.NumberOfEntries;
        if (beyondLast)
        {
            parentPage.LastSearchPosition--;
        }
        parentPage.LastSearchPosition--;
        sibling = _tx.ModifyPage(parentPage.GetNode(parentPage.LastSearchPosition)->PageNumber, _tree, null);
        parentPage.LastSearchPosition++;
        if (beyondLast)
        {
            parentPage.LastSearchPosition++;
        }
        sibling.LastSearchPosition = sibling.NumberOfEntries - 1;
        page.LastSearchPosition = 0;
    }
    return(sibling);
}
/// <summary>
/// Moves a single branch node between sibling pages during rebalancing, maintaining
/// the implicit (empty-key) left-most entry on both pages, transferring the moved
/// subtree's item count, and updating the separator key in the parent page.
/// </summary>
private void MoveBranchNode(Page parentPage, Page from, Page to)
{
    Debug.Assert(from.IsBranch);
    var originalFromKeyStart = GetActualKey(from, from.LastSearchPositionOrLastEntry);
    var fromNode = from.GetNode(from.LastSearchPosition);
    long pageNum = fromNode->PageNumber;
    // the moved reference carries its whole subtree's item count with it
    var itemsMoved = _tx.Pager.Get(_tx, pageNum).ItemCount;
    from.ItemCount -= itemsMoved;
    to.ItemCount += itemsMoved;
    if (to.LastSearchPosition == 0)
    {
        // cannot add to left implicit side, adjust by moving the left node
        // to the right by one, then adding the new one as the left
        var implicitLeftKey = GetActualKey(to, 0);
        var leftPageNumber = to.GetNode(0)->PageNumber;
        to.AddNode(1, implicitLeftKey, -1, leftPageNumber);
        to.AddNode(0, Slice.BeforeAllKeys, -1, pageNum);
        // drop the old implicit entry (shifted to index 1 by the insert above)
        to.RemoveNode(1);
    }
    else
    {
        to.AddNode(to.LastSearchPosition, originalFromKeyStart, -1, pageNum);
    }
    if (from.LastSearchPositionOrLastEntry == 0)
    {
        // cannot just remove the left node, need to adjust those
        var rightPageNumber = from.GetNode(1)->PageNumber;
        from.RemoveNode(0); // remove the original node
        from.RemoveNode(0); // remove the next node
        from.AddNode(0, Slice.BeforeAllKeys, -1, rightPageNumber);
        Debug.Assert(from.NumberOfEntries >= 2);
    }
    else
    {
        from.RemoveNode(from.LastSearchPositionOrLastEntry);
    }
    // replace the parent separator with the new smallest key of whichever page
    // now begins at a different key
    var pos = parentPage.LastSearchPositionOrLastEntry;
    parentPage.RemoveNode(pos);
    var newKey = GetActualKey(to, 0); // get the next smallest key it has now
    var pageNumber = to.PageNumber;
    if (parentPage.GetNode(0)->PageNumber == to.PageNumber)
    {
        pageNumber = from.PageNumber;
        newKey = GetActualKey(from, 0);
    }
    parentPage.AddNode(pos, newKey, -1, pageNumber);
}
/// <summary>
/// For leaf pages, check the split point based on what
/// fits where, since otherwise adding the node can fail.
/// This check is only needed when the data items are
/// relatively large, such that being off by one will
/// make the difference between success or failure.
/// It's also relevant if a page happens to be laid out
/// such that one half of its nodes are all "small" and
/// the other half of its nodes are "large." If the new
/// item is also "large" and falls on the half with
/// "large" nodes, it also may not fit.
/// </summary>
private int AdjustSplitPosition(Slice key, int len, Page page, int currentIndex, int splitIndex, ref bool newPosition)
{
    int nodeSize = SizeOf.NodeEntry(AbstractPager.PageMaxSpace, key, len) + Constants.NodeOffsetSize;
    // many entries and a small new node: the proposed middle split is safe as-is
    if (page.NumberOfEntries >= 20 && nodeSize <= AbstractPager.PageMaxSpace / 16)
    {
        return(splitIndex);
    }
    int pageSize = nodeSize;
    if (currentIndex <= splitIndex)
    {
        // new node lands in the left half: accumulate sizes from the start and
        // stop the split where the left page would overflow
        newPosition = false;
        for (int i = 0; i < splitIndex; i++)
        {
            NodeHeader *node = page.GetNode(i);
            pageSize += node->GetNodeSize();
            pageSize += pageSize & 1; // keep the running size 2-byte aligned
            if (pageSize > AbstractPager.PageMaxSpace)
            {
                if (i <= currentIndex)
                {
                    if (i < currentIndex)
                    {
                        newPosition = true;
                    }
                    return(currentIndex);
                }
                return((ushort)i);
            }
        }
    }
    else
    {
        // new node lands in the right half: accumulate sizes from the end backwards
        for (int i = page.NumberOfEntries - 1; i >= splitIndex; i--)
        {
            NodeHeader *node = page.GetNode(i);
            pageSize += node->GetNodeSize();
            pageSize += pageSize & 1; // keep the running size 2-byte aligned
            if (pageSize > AbstractPager.PageMaxSpace)
            {
                if (i >= currentIndex)
                {
                    newPosition = false;
                    return(currentIndex);
                }
                return((ushort)(i + 1));
            }
        }
    }
    return(splitIndex);
}
/// <summary>
/// Moves a single branch node between sibling pages during rebalancing, maintaining
/// the implicit (empty-key) left-most entry on both pages via ChangeImplicitRefPageNode,
/// and updating the separator key in the parent page.
/// </summary>
private void MoveBranchNode(Page parentPage, Page from, Page to)
{
    Debug.Assert(from.IsBranch);
    var originalFromKeyStart = GetActualKey(from, from.LastSearchPositionOrLastEntry);
    to.EnsureHasSpaceFor(_tx, originalFromKeyStart, -1);
    var fromNode = from.GetNode(from.LastSearchPosition);
    long pageNum = fromNode->PageNumber;
    if (to.LastSearchPosition == 0)
    {
        // cannot add to left implicit side, adjust by moving the left node
        // to the right by one, then adding the new one as the left
        var implicitLeftKey = GetActualKey(to, 0);
        var leftPageNumber = to.GetNode(0)->PageNumber;
        to.AddPageRefNode(1, implicitLeftKey, leftPageNumber);
        to.ChangeImplicitRefPageNode(pageNum); // setup the new implicit node
    }
    else
    {
        to.AddPageRefNode(to.LastSearchPosition, originalFromKeyStart, pageNum);
    }
    if (from.LastSearchPositionOrLastEntry == 0)
    {
        var rightPageNumber = from.GetNode(1)->PageNumber;
        from.RemoveNode(0); // remove the original implicit node
        from.ChangeImplicitRefPageNode(rightPageNumber); // setup the new implicit node
        Debug.Assert(from.NumberOfEntries >= 2);
    }
    else
    {
        from.RemoveNode(from.LastSearchPositionOrLastEntry);
    }
    // replace the parent separator with the new smallest key of whichever page
    // now begins at a different key
    var pos = parentPage.LastSearchPositionOrLastEntry;
    parentPage.RemoveNode(pos);
    var newKey = GetActualKey(to, 0); // get the next smallest key it has now
    var pageNumber = to.PageNumber;
    if (parentPage.GetNode(0)->PageNumber == to.PageNumber)
    {
        pageNumber = from.PageNumber;
        newKey = GetActualKey(from, 0);
    }
    parentPage.EnsureHasSpaceFor(_tx, newKey, -1);
    parentPage.AddPageRefNode(pos, newKey, pageNumber);
}
/// <summary>
/// Resolves the real key for the node at <paramref name="pos"/>. A branch page's
/// implicit left-most node carries an empty key, so we keep descending into the
/// referenced child's first node until a node with an actual key is found.
/// </summary>
private Slice GetActualKey(Page page, int pos)
{
    var current = page.GetNode(pos);
    var actualKey = new Slice(current);
    while (actualKey.Size == 0)
    {
        // only the implicit left-most entry of a branch page may have an empty key
        Debug.Assert(page.LastSearchPosition == 0 && page.IsBranch);
        page = _tx.GetReadOnlyPage(current->PageNumber);
        current = page.GetNode(0);
        actualKey.Set(current);
    }
    return actualKey;
}
/// <summary>
/// Resolves the real key of the node at <paramref name="pos"/>, following implicit
/// (zero-length-key) branch entries down to the first child node that carries a key.
/// The node the key was finally read from is returned through <paramref name="node"/>.
/// </summary>
private MemorySlice GetActualKey(Page page, int pos, out NodeHeader *node)
{
    var current = page.GetNode(pos);
    var actualKey = page.GetNodeKey(current);
    // a zero-length key marks a branch page's implicit entry; chase the referenced
    // child until a node with a real key turns up
    while (actualKey.KeyLength == 0)
    {
        Debug.Assert(page.IsBranch);
        page = _tx.GetReadOnlyPage(current->PageNumber);
        current = page.GetNode(0);
        actualKey = page.GetNodeKey(current);
    }
    node = current;
    return actualKey;
}
/// <summary>
/// Moves a single leaf node between sibling pages during rebalancing: copies the
/// node's value bytes into the destination, adjusts both pages' item counts, removes
/// the node from the source and updates the separator key in the parent page.
/// </summary>
private void MoveLeafNode(Page parentPage, Page from, Page to)
{
    Debug.Assert(from.IsBranch == false);
    var originalFromKeyStart = GetActualKey(from, from.LastSearchPositionOrLastEntry);
    var fromNode = from.GetNode(from.LastSearchPosition);
    // raw pointer to the node's value bytes (node header + key precede the data)
    byte *val = @from.Base + @from.KeysOffsets[@from.LastSearchPosition] + Constants.NodeHeaderSize + originalFromKeyStart.Size;
    var dataPos = to.AddNode(to.LastSearchPosition, originalFromKeyStart, fromNode->DataSize, -1);
    NativeMethods.memcpy(dataPos, val, fromNode->DataSize);
    // keep the per-page item counters in sync with the moved entry
    // (fix: this statement had been mangled into "[email protected]" by an
    //  email obfuscator; restored to the decrement it was meant to be)
    --@from.ItemCount;
    ++to.ItemCount;
    from.RemoveNode(from.LastSearchPositionOrLastEntry);
    // replace the parent separator with the new smallest key of whichever page
    // now begins at a different key
    var pos = parentPage.LastSearchPositionOrLastEntry;
    parentPage.RemoveNode(pos);
    var newKey = GetActualKey(to, 0); // get the next smallest key it has now
    var pageNumber = to.PageNumber;
    if (parentPage.GetNode(0)->PageNumber == to.PageNumber)
    {
        pageNumber = from.PageNumber;
        newKey = GetActualKey(from, 0);
    }
    parentPage.AddNode(pos, newKey, -1, pageNumber);
}
/// <summary>
/// Grows the nested page used by a multi-value tree entry: snapshots the current
/// nested page, re-adds the entry with the larger size, replays the existing nodes
/// into the new nested page, then inserts <paramref name="value"/> at its sorted slot.
/// </summary>
private void ExpandMultiTreeNestedPageSize(Transaction tx, Slice key, Slice value, byte *nestedPagePtr, ushort newSize, int currentSize)
{
    Debug.Assert(newSize > currentSize);
    TemporaryPage tmp;
    using (tx.Environment.GetTemporaryPage(tx, out tmp))
    {
        var tempPagePointer = tmp.TempPagePointer;
        // snapshot the nested page before Delete() releases the storage it lives in
        NativeMethods.memcpy(tempPagePointer, nestedPagePtr, currentSize);
        Delete(key); // release our current page
        Page nestedPage = new Page(tempPagePointer, "multi tree", (ushort)currentSize);
        var ptr = DirectAdd(key, newSize);
        var newNestedPage = new Page(ptr, "multi tree", newSize)
        {
            Lower = (ushort)Constants.PageHeaderSize,
            Upper = newSize,
            Flags = PageFlags.Leaf,
            PageNumber = -1L // mark as invalid page number
        };
        Slice nodeKey = new Slice(SliceOptions.Key);
        for (int i = 0; i < nestedPage.NumberOfEntries; i++)
        {
            var nodeHeader = nestedPage.GetNode(i);
            nodeKey.Set(nodeHeader);
            // we dec by one because AddDataNode will inc by one, and we don't want to change those values
            newNestedPage.AddDataNode(i, nodeKey, 0, (ushort)(nodeHeader->Version - 1));
        }
        // Search leaves LastSearchPosition at the sorted slot for the new value
        newNestedPage.Search(value);
        newNestedPage.AddDataNode(newNestedPage.LastSearchPosition, value, 0, 0);
    }
}
/// <summary>
/// Moves a single leaf node (data, page-ref or multi-value) between sibling pages
/// during rebalancing, then updates the separator key in the parent page.
/// </summary>
private void MoveLeafNode(Page parentPage, Page from, Page to)
{
    Debug.Assert(from.IsBranch == false);
    var originalFromKeyStart = GetActualKey(from, from.LastSearchPositionOrLastEntry);
    var fromNode = from.GetNode(from.LastSearchPosition);
    // raw pointer to the node's value bytes (node header + key precede the data)
    byte *val = @from.Base + @from.KeysOffsets[@from.LastSearchPosition] + Constants.NodeHeaderSize + originalFromKeyStart.Size;
    var nodeVersion = fromNode->Version; // every time new node is allocated the version is increased, but in this case we do not want to increase it
    if (nodeVersion > 0)
    {
        nodeVersion -= 1;
    }
    byte *dataPos;
    switch (fromNode->Flags)
    {
        case NodeFlags.PageRef:
            to.EnsureHasSpaceFor(_tx, originalFromKeyStart, -1);
            dataPos = to.AddPageRefNode(to.LastSearchPosition, originalFromKeyStart, fromNode->PageNumber);
            break;
        case NodeFlags.Data:
            to.EnsureHasSpaceFor(_tx, originalFromKeyStart, fromNode->DataSize);
            dataPos = to.AddDataNode(to.LastSearchPosition, originalFromKeyStart, fromNode->DataSize, nodeVersion);
            break;
        case NodeFlags.MultiValuePageRef:
            to.EnsureHasSpaceFor(_tx, originalFromKeyStart, fromNode->DataSize);
            dataPos = to.AddMultiValueNode(to.LastSearchPosition, originalFromKeyStart, fromNode->DataSize, nodeVersion);
            break;
        default:
            throw new NotSupportedException("Invalid node type to move: " + fromNode->Flags);
    }
    if (dataPos != null)
    {
        NativeMethods.memcpy(dataPos, val, fromNode->DataSize);
    }
    from.RemoveNode(from.LastSearchPositionOrLastEntry);
    // replace the parent separator with the new smallest key of whichever page
    // now begins at a different key
    var pos = parentPage.LastSearchPositionOrLastEntry;
    parentPage.RemoveNode(pos);
    var newKey = GetActualKey(to, 0); // get the next smallest key it has now
    var pageNumber = to.PageNumber;
    if (parentPage.GetNode(0)->PageNumber == to.PageNumber)
    {
        pageNumber = from.PageNumber;
        newKey = GetActualKey(from, 0);
    }
    parentPage.EnsureHasSpaceFor(_tx, newKey, -1);
    parentPage.AddPageRefNode(pos, newKey, pageNumber);
}
/// <summary>
/// Collapses a root branch page that holds a single page reference: the referenced
/// child becomes the new root and the old root page is freed. No-op for empty,
/// leaf, or multi-entry roots.
/// </summary>
private void RebalanceRoot(Cursor cursor, TreeDataInTransaction txInfo, Page page)
{
    if (page.NumberOfEntries == 0)
    {
        return; // nothing to do
    }
    if (!page.IsBranch || page.NumberOfEntries > 1)
    {
        return; // cannot do anything here
    }
    // in this case, we have a root pointer with just one pointer, we can just swap it out
    var node = page.GetNode(0);
    Debug.Assert(node->Flags == (NodeFlags.PageRef));
    _tx.ModifyCursor(txInfo, cursor);
    // after the collapse the tree is a single leaf page
    txInfo.State.LeafPages = 1;
    txInfo.State.BranchPages = 0;
    txInfo.State.Depth = 1;
    txInfo.State.PageCount = 1;
    var rootPage = _tx.ModifyPage(_txInfo.Tree, null, node->PageNumber, cursor);
    rootPage.ItemCount = 1;
    txInfo.RootPageNumber = rootPage.PageNumber;
    Debug.Assert(rootPage.Dirty);
    // swap the old root for the new one on the cursor stack
    cursor.Pop();
    cursor.Push(rootPage);
    _tx.FreePage(page.PageNumber);
}
/// <summary>
/// Collapses a root branch page that holds a single page reference: the referenced
/// child becomes the new root, the tree depth shrinks by one and the old root is
/// freed. No-op for empty, leaf, or multi-entry roots.
/// </summary>
private void RebalanceRoot(Page page)
{
    if (page.NumberOfEntries == 0)
    {
        return; // nothing to do
    }
    if (!page.IsBranch || page.NumberOfEntries > 1)
    {
        return; // cannot do anything here
    }
    // in this case, we have a root pointer with just one pointer, we can just swap it out
    var node = page.GetNode(0);
    Debug.Assert(node->Flags == (NodeFlags.PageRef));
    var rootPage = _tx.ModifyPage(node->PageNumber, _tree, null);
    _tree.State.RootPageNumber = rootPage.PageNumber;
    _tree.State.Depth--;
    Debug.Assert(rootPage.Dirty);
    // swap the old root for the new one on the cursor stack
    _cursor.Pop();
    _cursor.Push(rootPage);
    _tree.FreePage(page);
}
/// <summary>
/// Builds a human-readable dump of the current split state (tree name, keys, indexes
/// and a per-node listing of both pages) for inclusion in diagnostic error messages.
/// </summary>
private string GatherDetailedDebugInfo(Page rightPage, MemorySlice currentKey, MemorySlice seperatorKey, int currentIndex, int splitIndex, bool toRight)
{
    var debugInfo = new StringBuilder();
    debugInfo.AppendFormat("\r\n_tree.Name: {0}\r\n", _tree.Name);
    debugInfo.AppendFormat("_newKey: {0}, _len: {1}, needed space: {2}\r\n", _newKey, _len, _page.GetRequiredSpace(_newKey, _len));
    debugInfo.AppendFormat("key at LastSearchPosition: {0}, current key: {1}, seperatorKey: {2}\r\n", _page.GetNodeKey(_page.LastSearchPosition), currentKey, seperatorKey);
    debugInfo.AppendFormat("currentIndex: {0}\r\n", currentIndex);
    debugInfo.AppendFormat("splitIndex: {0}\r\n", splitIndex);
    debugInfo.AppendFormat("toRight: {0}\r\n", toRight);
    debugInfo.AppendFormat("_page info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", _page.Flags, _page.NumberOfEntries, _page.SizeLeft, _page.CalcSizeLeft());
    // dump every node of the page being split
    for (int i = 0; i < _page.NumberOfEntries; i++)
    {
        var node = _page.GetNode(i);
        var key = _page.GetNodeKey(node);
        debugInfo.AppendFormat("{0} - {2} {1}\r\n", key, node->DataSize, node->Flags == NodeFlags.Data ? "Size" : "Page");
    }
    debugInfo.AppendFormat("rightPage info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", rightPage.Flags, rightPage.NumberOfEntries, rightPage.SizeLeft, rightPage.CalcSizeLeft());
    // dump every node of the right (destination) page
    for (int i = 0; i < rightPage.NumberOfEntries; i++)
    {
        var node = rightPage.GetNode(i);
        var key = rightPage.GetNodeKey(node);
        debugInfo.AppendFormat("{0} - {2} {1}\r\n", key, node->DataSize, node->Flags == NodeFlags.Data ? "Size" : "Page");
    }
    return(debugInfo.ToString());
}
/// <summary>
/// Removes a branch page holding a single page reference by rewriting the parent's
/// node to point directly at the referenced child, then freeing the page. Keeps the
/// cursor stack consistent when the removed page was on top of it.
/// </summary>
private void RemoveBranchWithOneEntry(Page page, Page parentPage)
{
    Debug.Assert(page.NumberOfEntries == 1);
    var pageRefNumber = page.GetNode(0)->PageNumber;
    // locate the parent node that references this page
    NodeHeader *nodeHeader = null;
    for (int i = 0; i < parentPage.NumberOfEntries; i++)
    {
        nodeHeader = parentPage.GetNode(i);
        if (nodeHeader->PageNumber == page.PageNumber)
        {
            break;
        }
    }
    Debug.Assert(nodeHeader->PageNumber == page.PageNumber, string.Format("Node page number: {0}, page number: {1}", nodeHeader->PageNumber, page.PageNumber));
    // bypass the single-entry page: the parent now points straight at the child
    nodeHeader->PageNumber = pageRefNumber;
    if (_cursor.CurrentPage.PageNumber == page.PageNumber)
    {
        _cursor.Pop();
        _cursor.Push(_tx.GetReadOnlyPage(pageRefNumber));
    }
    _tree.FreePage(page);
}
/// <summary>
/// Moves the iterator to the next entry in key order.
/// Returns false when iteration is exhausted or the current key fails validation.
/// </summary>
public bool MoveNext()
{
    while (true)
    {
        _currentPage.LastSearchPosition++;
        if (_currentPage.LastSearchPosition < _currentPage.NumberOfEntries)
        {
            // there is another entry on this page; if it is a branch,
            // descend to the left-most leaf entry underneath it
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tx.GetReadOnlyPage(node->PageNumber);
                _currentPage.LastSearchPosition = 0;
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (this.ValidateCurrentKey(current, _cmp) == false)
            {
                return(false);
            }
            _currentKey.Set(current);
            return(true); // there is another entry in this page
        }
        // ran out of entries on this page, pop back to the parent page
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return(false);
}
/// <summary>
/// Moves the iterator to the previous entry in key order, optionally prefetching
/// overflow pages of visited leaves. Returns false when iteration is exhausted or
/// the current key fails validation.
/// </summary>
/// <exception cref="ObjectDisposedException">Thrown when the iterator has been disposed.</exception>
public bool MovePrev()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }
    while (true)
    {
        _currentPage.LastSearchPosition--;
        if (_currentPage.LastSearchPosition >= 0)
        {
            // there is a previous entry on this page; if it is a branch,
            // descend to the right-most leaf entry underneath it
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tx.GetReadOnlyPage(node->PageNumber);
                _currentPage.LastSearchPosition = _currentPage.NumberOfEntries - 1;
                // once we reach a leaf, optionally warm up its overflow pages
                if (_prefetch && _currentPage.IsLeaf)
                {
                    MaybePrefetchOverflowPages(_currentPage);
                }
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (this.ValidateCurrentKey(current, _currentPage) == false)
            {
                return(false);
            }
            _currentPage.SetNodeKey(current, ref _currentInternalKey);
            _currentKey = _currentInternalKey.ToSlice();
            return(true);// there is another entry in this page
        }
        // ran out of entries on this page, pop back to the parent page
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return(false);
}
/// <summary>
/// Restores page.LastSearchPosition after operations that may have shifted node
/// positions. Reuses the original position when it still points at the node that
/// references <paramref name="referencedPageNumber"/>; otherwise searches for it.
/// </summary>
private static void EnsureValidLastSearchPosition(Page page, long referencedPageNumber, int originalLastSearchPosition)
{
    var stillValid = originalLastSearchPosition < page.NumberOfEntries &&
                     page.GetNode(originalLastSearchPosition)->PageNumber == referencedPageNumber;
    page.LastSearchPosition = stillValid
        ? originalLastSearchPosition
        : page.NodePositionReferencing(referencedPageNumber);
}
/// <summary>
/// Removes the node at page.LastSearchPosition from a leaf page, reporting the
/// removed node's version through <paramref name="nodeVersion"/>. When the node is
/// an overflow pointer, the referenced overflow page is freed first.
/// </summary>
private void RemoveLeafNode(Page page, out ushort nodeVersion)
{
    var removed = page.GetNode(page.LastSearchPosition);
    nodeVersion = removed->Version;
    if (removed->Flags == NodeFlags.PageRef) // this is an overflow pointer
    {
        FreePage(_tx.GetReadOnlyPage(removed->PageNumber));
    }
    page.RemoveNode(page.LastSearchPosition);
}
/// <summary>
/// Copies every node from <paramref name="right"/> to the end of <paramref name="left"/>,
/// transfers the item count, unlinks the right sibling from the parent and frees it.
/// Assumes the caller has verified there is enough space in the left page.
/// </summary>
private void MergePages(Page parentPage, Page left, Page right)
{
    for (int i = 0; i < right.NumberOfEntries; i++)
    {
        // GetActualKey reads LastSearchPosition, so point it at the current entry
        right.LastSearchPosition = i;
        var key = GetActualKey(right, right.LastSearchPositionOrLastEntry);
        var node = right.GetNode(i);
        left.CopyNodeDataToEndOfPage(node, key);
    }
    left.ItemCount += right.ItemCount;
    parentPage.RemoveNode(parentPage.LastSearchPositionOrLastEntry); // unlink the right sibling
    _tx.FreePage(right.PageNumber);
}
/// <summary>
/// Re-resolves the nested page pointer after an operation that may have defragmented
/// the outer page and relocated the node's data.
/// </summary>
private void EnsureNestedPagePointer(Page page, NodeHeader *currentItem, ref Page nestedPage, ref byte *nestedPagePtr)
{
    var movedItem = page.GetNode(page.LastSearchPosition);
    if (movedItem == currentItem)
    {
        return; // the data did not move, the existing pointers are still valid
    }
    // HasSpaceFor could have called Defrag internally, moving the item;
    // need to ensure the nested page has a valid pointer
    nestedPagePtr = NodeHeader.DirectAccess(_tx, movedItem);
    nestedPage = new Page(nestedPagePtr, "multi tree", (ushort)NodeHeader.GetDataSize(_tx, movedItem));
}
/// <summary>
/// Attempts to position the iterator on the node at _page.LastSearchPosition,
/// updating _currentKey. Returns false when the position is out of range or the
/// key fails validation.
/// </summary>
private bool TrySetPosition()
{
    var position = _page.LastSearchPosition;
    if (position < 0 || position >= _page.NumberOfEntries)
        return false;

    var node = _page.GetNode(position);
    if (!ValidateCurrentKey(node))
        return false;

    _currentKey.Set(node);
    return true;
}
/// <summary>
/// Checks whether every entry of <paramref name="right"/> would fit into
/// <paramref name="left"/>, accounting for the key each node will carry after the
/// copy. Restores right.LastSearchPosition before returning.
/// </summary>
private bool HasEnoughSpaceToCopyNodes(Page left, Page right)
{
    var savedPosition = right.LastSearchPosition;
    var requiredSpace = 0;
    for (var index = 0; index < right.NumberOfEntries; index++)
    {
        // GetActualKey reads LastSearchPosition, so point it at the current entry
        right.LastSearchPosition = index;
        var actualKey = GetActualKey(right, right.LastSearchPositionOrLastEntry);
        var entry = right.GetNode(index);
        requiredSpace += SizeOf.NodeEntryWithAnotherKey(entry, actualKey) + Constants.NodeOffsetSize;
    }
    right.LastSearchPosition = savedPosition; // prevent mutation of the parameter
    return(left.HasSpaceFor(_tx, requiredSpace));
}
/// <summary>
/// Attempts to position the iterator on the node at _page.LastSearchPosition,
/// refreshing _currentInternalKey and _currentKey. Returns false when the position
/// is out of range or the key fails validation.
/// </summary>
private bool TrySetPosition()
{
    var position = _page.LastSearchPosition;
    if (position < 0 || position >= _page.NumberOfEntries)
        return false;

    var node = _page.GetNode(position);
    if (!ValidateCurrentKey(node, _page))
        return false;

    _page.SetNodeKey(node, ref _currentInternalKey);
    _currentKey = _currentInternalKey.ToSlice();
    return true;
}
/// <summary>
/// Removes the node at page.LastSearchPosition from a leaf page, returning the
/// node's version. When the node points at an overflow chain, every overflow page
/// is freed and the tree state counters are adjusted.
/// </summary>
private void RemoveLeafNode(Page page, out ushort nodeVersion)
{
    var node = page.GetNode(page.LastSearchPosition);
    nodeVersion = node->Version;
    if (node->Flags == (NodeFlags.PageRef)) // this is an overflow pointer
    {
        var overflowPage = _tx.GetReadOnlyPage(node->PageNumber);
        var numberOfPages = _tx.DataPager.GetNumberOfOverflowPages(overflowPage.OverflowSize);
        // overflow pages follow the first one contiguously, so free by offset
        for (int i = 0; i < numberOfPages; i++)
        {
            _tx.FreePage(overflowPage.PageNumber + i);
        }
        State.OverflowPages -= numberOfPages;
        State.PageCount -= numberOfPages;
    }
    page.RemoveNode(page.LastSearchPosition);
}
/// <summary>
/// Removes the node at page.LastSearchPosition from a leaf page and decrements the
/// cursor's item count. When the node points at an overflow chain, every overflow
/// page is freed and the tree's transaction state counters are adjusted.
/// </summary>
private void RemoveLeafNode(Transaction tx, Cursor cursor, Page page)
{
    var node = page.GetNode(page.LastSearchPosition);
    if (node->Flags == (NodeFlags.PageRef)) // this is an overflow pointer
    {
        var overflowPage = tx.GetReadOnlyPage(node->PageNumber);
        var numberOfPages = GetNumberOfOverflowPages(tx, overflowPage.OverflowSize);
        // overflow pages follow the first one contiguously, so free by offset
        for (int i = 0; i < numberOfPages; i++)
        {
            tx.FreePage(overflowPage.PageNumber + i);
        }
        var txInfo = tx.GetTreeInformation(this);
        txInfo.State.OverflowPages -= numberOfPages;
        txInfo.State.PageCount -= numberOfPages;
    }
    page.RemoveNode(page.LastSearchPosition);
    cursor.DecrementItemCount();
}
/// <summary>
/// Attempts to position the iterator on the node at _page.LastSearchPosition,
/// refreshing _currentInternalKey and _currentKey. Returns false when the position
/// is out of range or the key fails validation.
/// </summary>
/// <exception cref="ObjectDisposedException">Thrown when the iterator has been disposed.</exception>
private bool TrySetPosition()
{
    if (_disposed)
        throw new ObjectDisposedException("PageIterator");

    var position = _page.LastSearchPosition;
    if (position < 0 || position >= _page.NumberOfEntries)
        return false;

    var node = _page.GetNode(position);
    if (!ValidateCurrentKey(node, _page))
        return false;

    _page.SetNodeKey(node, ref _currentInternalKey);
    _currentKey = _currentInternalKey.ToSlice();
    return true;
}
/// <summary>
/// Removes a branch page holding a single page reference by rewriting the parent's
/// node to point directly at the referenced child, then freeing the page.
/// </summary>
private void RemoveBranchWithOneEntry(Page page, Page parentPage)
{
    var pageRefNumber = page.GetNode(0)->PageNumber;
    // locate the parent node that references this page
    NodeHeader *nodeHeader = null;
    for (int i = 0; i < parentPage.NumberOfEntries; i++)
    {
        nodeHeader = parentPage.GetNode(i);
        if (nodeHeader->PageNumber == page.PageNumber)
        {
            break;
        }
    }
    Debug.Assert(nodeHeader->PageNumber == page.PageNumber);
    // bypass the single-entry page: the parent now points straight at the child
    nodeHeader->PageNumber = pageRefNumber;
    _tree.FreePage(page);
}
/// <summary>
/// Grows the nested page used by a multi-value tree entry (prefixed-keys aware):
/// snapshots the current nested page, re-adds the entry with the larger size,
/// replays the existing nodes into the new nested page, then inserts
/// <paramref name="value"/> at its sorted slot.
/// </summary>
private void ExpandMultiTreeNestedPageSize(Transaction tx, Slice key, Slice value, byte *nestedPagePtr, ushort newSize, int currentSize)
{
    Debug.Assert(newSize > currentSize);
    TemporaryPage tmp;
    using (tx.Environment.GetTemporaryPage(tx, out tmp))
    {
        var tempPagePointer = tmp.TempPagePointer;
        // snapshot the nested page before Delete() releases the storage it lives in
        MemoryUtils.Copy(tempPagePointer, nestedPagePtr, currentSize);
        Delete(key); // release our current page
        Page nestedPage = new Page(tempPagePointer, "multi tree", (ushort)currentSize);
        var ptr = DirectAdd(key, newSize);
        var newNestedPage = new Page(ptr, "multi tree", newSize)
        {
            Lower = (ushort)Constants.PageHeaderSize,
            // when keys are prefixed the prefix info section is carved off the top
            Upper = KeysPrefixing ? (ushort)(newSize - Constants.PrefixInfoSectionSize) : newSize,
            Flags = KeysPrefixing ? PageFlags.Leaf | PageFlags.KeysPrefixed : PageFlags.Leaf,
            PageNumber = -1L // mark as invalid page number
        };
        newNestedPage.ClearPrefixInfo();
        MemorySlice nodeKey = nestedPage.CreateNewEmptyKey();
        for (int i = 0; i < nestedPage.NumberOfEntries; i++)
        {
            var nodeHeader = nestedPage.GetNode(i);
            nestedPage.SetNodeKey(nodeHeader, ref nodeKey);
            nodeKey = newNestedPage.PrepareKeyToInsert(nodeKey, i);
            // we dec by one because AddDataNode will inc by one, and we don't want to change those values
            newNestedPage.AddDataNode(i, nodeKey, 0, (ushort)(nodeHeader->Version - 1));
        }
        // Search leaves LastSearchPosition at the sorted slot for the new value
        newNestedPage.Search(value);
        newNestedPage.AddDataNode(newNestedPage.LastSearchPosition, newNestedPage.PrepareKeyToInsert(value, newNestedPage.LastSearchPosition), 0, 0);
    }
}
/// <summary>
/// Attempts to merge <paramref name="right"/> into <paramref name="left"/> using a
/// temporary scratch page, so a mid-way space failure leaves both pages untouched.
/// On success the right sibling is unlinked from the parent and freed; returns
/// false (with state restored) when the merged result would not fit.
/// </summary>
private bool TryMergePages(Page parentPage, Page left, Page right)
{
    TemporaryPage tmp;
    using (_tx.Environment.GetTemporaryPage(_tx, out tmp))
    {
        // build the merge on a scratch copy so failure cannot corrupt the left page
        var mergedPage = tmp.GetTempPage(left.KeysPrefixed);
        Memory.Copy(mergedPage.Base, left.Base, left.PageSize);
        var previousSearchPosition = right.LastSearchPosition;
        for (int i = 0; i < right.NumberOfEntries; i++)
        {
            // GetActualKey reads LastSearchPosition, so point it at the current entry
            right.LastSearchPosition = i;
            var key = GetActualKey(right, right.LastSearchPositionOrLastEntry);
            var node = right.GetNode(i);
            var prefixedKey = mergedPage.PrepareKeyToInsert(key, mergedPage.NumberOfEntries);
            if (mergedPage.HasSpaceFor(_tx, SizeOf.NodeEntryWithAnotherKey(node, prefixedKey) + Constants.NodeOffsetSize + SizeOf.NewPrefix(prefixedKey)) == false)
            {
                right.LastSearchPosition = previousSearchPosition; //previous position --> prevent mutation of parameter
                return(false);
            }
            mergedPage.CopyNodeDataToEndOfPage(node, prefixedKey);
        }
        // every entry fit; commit the merged content back into the left page
        Memory.Copy(left.Base, mergedPage.Base, left.PageSize);
    }
    parentPage.RemoveNode(parentPage.LastSearchPositionOrLastEntry); // unlink the right sibling
    _tree.FreePage(right);
    return(true);
}
/// <summary>
/// Resolves the real key for the node at <paramref name="pos"/>. A branch page's
/// implicit left-most node carries an empty key, so we keep descending into the
/// referenced child's first node until a node with an actual key is found.
/// </summary>
private Slice GetActualKey(Page page, int pos)
{
    var current = page.GetNode(pos);
    var actualKey = new Slice(current);
    while (actualKey.Size == 0)
    {
        // only a branch page may carry an empty (implicit) key
        Debug.Assert(page.IsBranch);
        page = _tx.GetReadOnlyPage(current->PageNumber);
        current = page.GetNode(0);
        actualKey.Set(current);
    }
    return actualKey;
}
/// <summary>
/// Grows the nested page used by a multi-value tree entry: snapshots the current
/// nested page, re-adds the entry with the larger size, replays the existing nodes
/// into the new nested page, then inserts <paramref name="value"/> at its sorted slot.
/// </summary>
private void ExpandMultiTreeNestedPageSize(Transaction tx, Slice key, Slice value, byte* nestedPagePtr, ushort newSize, int currentSize)
{
    Debug.Assert(newSize > currentSize);
    TemporaryPage tmp;
    using (tx.Environment.GetTemporaryPage(tx, out tmp))
    {
        var tempPagePointer = tmp.TempPagePointer;
        // snapshot the nested page before Delete() releases the storage it lives in
        NativeMethods.memcpy(tempPagePointer, nestedPagePtr, currentSize);
        Delete(key); // release our current page
        Page nestedPage = new Page(tempPagePointer, "multi tree", (ushort)currentSize);
        var ptr = DirectAdd(key, newSize);
        var newNestedPage = new Page(ptr, "multi tree", newSize)
        {
            Lower = (ushort)Constants.PageHeaderSize,
            Upper = newSize,
            Flags = PageFlags.Leaf,
            PageNumber = -1L // mark as invalid page number
        };
        Slice nodeKey = new Slice(SliceOptions.Key);
        for (int i = 0; i < nestedPage.NumberOfEntries; i++)
        {
            var nodeHeader = nestedPage.GetNode(i);
            nodeKey.Set(nodeHeader);
            // we dec by one because AddDataNode will inc by one, and we don't want to change those values
            newNestedPage.AddDataNode(i, nodeKey, 0, (ushort)(nodeHeader->Version - 1));
        }
        // Search leaves LastSearchPosition at the sorted slot for the new value
        newNestedPage.Search(value);
        newNestedPage.AddDataNode(newNestedPage.LastSearchPosition, value, 0, 0);
    }
}
/// <summary>
/// Checks whether every entry of <paramref name="right"/> would fit into
/// <paramref name="left"/>, accounting for the key each node will carry after the
/// copy. Restores right.LastSearchPosition before returning.
/// </summary>
private bool HasEnoughSpaceToCopyNodes(Page left, Page right)
{
    var savedPosition = right.LastSearchPosition;
    var requiredSpace = 0;
    for (var index = 0; index < right.NumberOfEntries; index++)
    {
        // GetActualKey reads LastSearchPosition, so point it at the current entry
        right.LastSearchPosition = index;
        var actualKey = GetActualKey(right, right.LastSearchPositionOrLastEntry);
        var entry = right.GetNode(index);
        requiredSpace += SizeOf.NodeEntryWithAnotherKey(entry, actualKey) + Constants.NodeOffsetSize;
    }
    right.LastSearchPosition = savedPosition; // prevent mutation of the parameter
    return left.HasSpaceFor(_tx, requiredSpace);
}
/// <summary>
/// Collapses a root branch page that holds a single page reference: the referenced
/// child becomes the new root and the old root page is freed. No-op for empty,
/// leaf, or multi-entry roots.
/// </summary>
private void RebalanceRoot(Page page)
{
    if (page.NumberOfEntries == 0)
        return; // nothing to do
    if (!page.IsBranch || page.NumberOfEntries > 1)
    {
        return; // cannot do anything here
    }
    // in this case, we have a root pointer with just one pointer, we can just swap it out
    var node = page.GetNode(0);
    Debug.Assert(node->Flags == (NodeFlags.PageRef));
    // after the collapse the tree is a single leaf page
    _tree.State.LeafPages = 1;
    _tree.State.BranchPages = 0;
    _tree.State.Depth = 1;
    _tree.State.PageCount = 1;
    var rootPage = _tx.ModifyPage(node->PageNumber, _tree, null);
    _tree.State.RootPageNumber = rootPage.PageNumber;
    Debug.Assert(rootPage.Dirty);
    // swap the old root for the new one on the cursor stack
    _cursor.Pop();
    _cursor.Push(rootPage);
    _tx.FreePage(page.PageNumber);
}
/// <summary>
/// Recursively renders a page and its children as a collapsible HTML tree for
/// debugging: leaf entries show key, flags and data size; branch entries recurse
/// into the referenced child pages.
/// </summary>
private static unsafe void RenderPage(Transaction tx, Page page, TextWriter sw, string text, bool open)
{
    sw.WriteLine(
        "<ul><li><input type='checkbox' id='page-{0}' {3} /><label for='page-{0}'>{4}: Page {0:#,#;;0} - {1} - {2:#,#;;0} entries</label><ul>",
        page.PageNumber, page.IsLeaf ? "Leaf" : "Branch", page.NumberOfEntries, open ? "checked" : "", text);
    for (int i = 0; i < page.NumberOfEntries; i++)
    {
        var nodeHeader = page.GetNode(i);
        if (page.IsLeaf)
        {
            var key = new Slice(nodeHeader).ToString();
            sw.Write("<li>{0} {1} - size: {2:#,#}</li>", key, nodeHeader->Flags, NodeHeader.GetDataSize(tx, nodeHeader));
        }
        else
        {
            var key = new Slice(nodeHeader).ToString();
            var pageNum = nodeHeader->PageNumber;
            // a branch page's first entry is the implicit left-most reference
            if (i == 0)
                key = "[smallest]";
            RenderPage(tx, tx.GetReadOnlyPage(pageNum), sw, key, false);
        }
    }
    sw.WriteLine("</ul></li></ul>");
}
/// <summary>
/// Copies every node from <paramref name="right"/> to the end of <paramref name="left"/>,
/// unlinks the right sibling from the parent and frees it. Assumes the caller has
/// verified there is enough space in the left page.
/// </summary>
private void MergePages(Page parentPage, Page left, Page right)
{
    for (int i = 0; i < right.NumberOfEntries; i++)
    {
        // GetActualKey reads LastSearchPosition, so point it at the current entry
        right.LastSearchPosition = i;
        var key = GetActualKey(right, right.LastSearchPositionOrLastEntry);
        var node = right.GetNode(i);
        left.CopyNodeDataToEndOfPage(node, key);
    }
    parentPage.RemoveNode(parentPage.LastSearchPositionOrLastEntry); // unlink the right sibling
    _tx.FreePage(right.PageNumber);
}
/// <summary>
/// Attempts to merge <paramref name="right"/> into <paramref name="left"/> using a
/// temporary scratch page, so a mid-way space failure leaves both pages untouched.
/// On success the right sibling is unlinked from the parent and freed; returns
/// false (with state restored) when the merged result would not fit.
/// </summary>
private bool TryMergePages(Page parentPage, Page left, Page right)
{
    TemporaryPage tmp;
    using (_tx.Environment.GetTemporaryPage(_tx, out tmp))
    {
        // build the merge on a scratch copy so failure cannot corrupt the left page
        var mergedPage = tmp.GetTempPage(left.KeysPrefixed);
        Memory.Copy(mergedPage.Base, left.Base, left.PageSize);
        var previousSearchPosition = right.LastSearchPosition;
        for (int i = 0; i < right.NumberOfEntries; i++)
        {
            // GetActualKey reads LastSearchPosition, so point it at the current entry
            right.LastSearchPosition = i;
            var key = GetActualKey(right, right.LastSearchPositionOrLastEntry);
            var node = right.GetNode(i);
            var prefixedKey = mergedPage.PrepareKeyToInsert(key, mergedPage.NumberOfEntries);
            if (mergedPage.HasSpaceFor(_tx, SizeOf.NodeEntryWithAnotherKey(node, prefixedKey) + Constants.NodeOffsetSize + SizeOf.NewPrefix(prefixedKey)) == false)
            {
                right.LastSearchPosition = previousSearchPosition; //previous position --> prevent mutation of parameter
                return false;
            }
            mergedPage.CopyNodeDataToEndOfPage(node, prefixedKey);
        }
        // every entry fit; commit the merged content back into the left page
        Memory.Copy(left.Base, mergedPage.Base, left.PageSize);
    }
    parentPage.RemoveNode(parentPage.LastSearchPositionOrLastEntry); // unlink the right sibling
    _tx.FreePage(right.PageNumber);
    return true;
}
/// <summary>
/// Picks the sibling page to move entries from (or merge with) during rebalancing,
/// and primes LastSearchPosition on the page, the sibling and the parent accordingly.
/// Takes the right sibling when the page is the parent's left-most child, otherwise
/// the left sibling.
/// </summary>
private Page SetupMoveOrMerge(Page page, Page parentPage)
{
    Page sibling;
    if (parentPage.LastSearchPosition == 0) // we are the left most item
    {
        sibling = _tx.ModifyPage(parentPage.GetNode(1)->PageNumber,_tree, null);
        sibling.LastSearchPosition = 0;
        page.LastSearchPosition = page.NumberOfEntries;
        parentPage.LastSearchPosition = 1;
    }
    else // there is at least 1 page to our left
    {
        // LastSearchPosition may point one past the last entry; compensate while
        // looking up the left sibling, then restore the original position afterwards
        var beyondLast = parentPage.LastSearchPosition == parentPage.NumberOfEntries;
        if (beyondLast)
            parentPage.LastSearchPosition--;
        parentPage.LastSearchPosition--;
        sibling = _tx.ModifyPage(parentPage.GetNode(parentPage.LastSearchPosition)->PageNumber,_tree, null);
        parentPage.LastSearchPosition++;
        if (beyondLast)
            parentPage.LastSearchPosition++;
        sibling.LastSearchPosition = sibling.NumberOfEntries - 1;
        page.LastSearchPosition = 0;
    }
    return sibling;
}
/// <summary>
/// Moves the iterator to the next entry in key order.
/// Returns false when iteration is exhausted or the current key fails validation.
/// </summary>
public bool MoveNext()
{
    while (_currentPage != null)
    {
        _currentPage.LastSearchPosition++;
        if (_currentPage.LastSearchPosition < _currentPage.NumberOfEntries)
        {
            // there is another entry on this page; if it is a branch,
            // descend to the left-most leaf entry underneath it
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tx.GetReadOnlyPage(node->PageNumber);
                _currentPage.LastSearchPosition = 0;
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (this.ValidateCurrentKey(current, _currentPage) == false)
                return false;
            _currentPage.SetNodeKey(current, ref _currentInternalKey);
            _currentKey = _currentInternalKey.ToSlice();
            return true;// there is another entry in this page
        }
        // ran out of entries on this page, pop back to the parent page
        if (_cursor.PageCount == 0)
            break;
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return false;
}
/// <summary>
/// Collapses a branch page that holds exactly one entry: the parent's
/// reference to this branch is redirected straight to the branch's only
/// child, and the now-unreferenced branch page is freed.
/// </summary>
private void RemoveBranchWithOneEntry(Page page, Page parentPage)
{
    // Precondition (matches the other overload of this routine): the branch
    // must have exactly one child, otherwise redirecting the parent to
    // GetNode(0) would silently drop entries.
    Debug.Assert(page.NumberOfEntries == 1);

    var pageRefNumber = page.GetNode(0)->PageNumber;

    // Locate the parent node that references the branch being removed.
    NodeHeader* nodeHeader = null;
    for (int i = 0; i < parentPage.NumberOfEntries; i++)
    {
        nodeHeader = parentPage.GetNode(i);
        if (nodeHeader->PageNumber == page.PageNumber)
            break;
    }

    // The parent must actually reference this page; otherwise the write
    // below would corrupt an unrelated node.
    Debug.Assert(nodeHeader->PageNumber == page.PageNumber);

    // Point the parent directly at the child, bypassing the removed branch.
    nodeHeader->PageNumber = pageRefNumber;

    _tx.FreePage(page.PageNumber);
}
/// <summary>
/// Removes the node at <c>page.LastSearchPosition</c> from a leaf page,
/// freeing any overflow pages the node points to and adjusting the tree's
/// page accounting.
/// </summary>
/// <param name="nodeVersion">Receives the version of the removed node.</param>
private void RemoveLeafNode(Transaction tx, Page page, out ushort nodeVersion)
{
    var node = page.GetNode(page.LastSearchPosition);
    nodeVersion = node->Version;
    if (node->Flags == (NodeFlags.PageRef)) // this is an overflow pointer
    {
        // Free the whole overflow run, not just the first page.
        var overflowPage = tx.GetReadOnlyPage(node->PageNumber);
        var numberOfPages = tx.DataPager.GetNumberOfOverflowPages(overflowPage.OverflowSize);
        for (int i = 0; i < numberOfPages; i++)
        {
            tx.FreePage(overflowPage.PageNumber + i);
        }
        // Keep the tree's bookkeeping in sync with the freed pages.
        State.OverflowPages -= numberOfPages;
        State.PageCount -= numberOfPages;
    }
    page.RemoveNode(page.LastSearchPosition);
}
/// <summary>
/// For leaf pages, check the split point based on what
/// fits where, since otherwise adding the node can fail.
/// This check is only needed when the data items are
/// relatively large, such that being off by one will
/// make the difference between success or failure.
/// It's also relevant if a page happens to be laid out
/// such that one half of its nodes are all "small" and
/// the other half of its nodes are "large." If the new
/// item is also "large" and falls on the half with
/// "large" nodes, it also may not fit.
/// </summary>
/// <param name="key">Key of the new entry being inserted.</param>
/// <param name="len">Data length of the new entry.</param>
/// <param name="page">The page being split.</param>
/// <param name="currentIndex">Insert position of the new entry.</param>
/// <param name="splitIndex">Initially proposed split point (usually the middle).</param>
/// <param name="newPosition">In/out flag tracking which half the new entry lands on.</param>
/// <returns>The (possibly adjusted) split index.</returns>
private int AdjustSplitPosition(Slice key, int len, Page page, int currentIndex, int splitIndex,
    ref bool newPosition)
{
    int nodeSize = SizeOf.NodeEntry(AbstractPager.PageMaxSpace, key, len) + Constants.NodeOffsetSize;
    // Plenty of small entries: the naive middle split is fine, skip the scan.
    if (page.NumberOfEntries >= 20 && nodeSize <= AbstractPager.PageMaxSpace/16)
    {
        return splitIndex;
    }
    int pageSize = nodeSize;
    if (currentIndex <= splitIndex)
    {
        // New entry goes on the left half: walk left-to-right and accumulate
        // sizes until the left half would overflow.
        newPosition = false;
        for (int i = 0; i < splitIndex; i++)
        {
            NodeHeader* node = page.GetNode(i);
            pageSize += node->GetNodeSize();
            pageSize += pageSize & 1; // keep the running size 2-byte aligned
            if (pageSize > AbstractPager.PageMaxSpace)
            {
                if (i <= currentIndex)
                {
                    if (i < currentIndex)
                        newPosition = true;
                    return currentIndex;
                }
                return (ushort) i;
            }
        }
    }
    else
    {
        // New entry goes on the right half: walk right-to-left instead.
        for (int i = page.NumberOfEntries - 1; i >= splitIndex; i--)
        {
            NodeHeader* node = page.GetNode(i);
            pageSize += node->GetNodeSize();
            pageSize += pageSize & 1; // keep the running size 2-byte aligned
            if (pageSize > AbstractPager.PageMaxSpace)
            {
                if (i >= currentIndex)
                {
                    newPosition = false;
                    return currentIndex;
                }
                return (ushort) (i + 1);
            }
        }
    }
    return splitIndex;
}
/// <summary>
/// Restores a page's LastSearchPosition after a structural change: the cached
/// position is kept only if it is still in range and still points at the
/// expected child page; otherwise the node referencing that page is located
/// again from scratch.
/// </summary>
private static void EnsureValidLastSearchPosition(Page page, long referencedPageNumber, int originalLastSearchPosition)
{
    // The range check must run first — it guards the GetNode dereference.
    var cachedPositionStillValid =
        originalLastSearchPosition < page.NumberOfEntries &&
        page.GetNode(originalLastSearchPosition)->PageNumber == referencedPageNumber;

    page.LastSearchPosition = cachedPositionStillValid
        ? originalLastSearchPosition
        : page.NodePositionReferencing(referencedPageNumber);
}
/// <summary>
/// Moves one branch node from <paramref name="from"/> to <paramref name="to"/>
/// during rebalancing, handling the implicit (empty-key) left-most entry on
/// both pages, then rewrites the separator key in the parent.
/// </summary>
private void MoveBranchNode(Page parentPage, Page from, Page to)
{
    Debug.Assert(from.IsBranch);
    var originalFromKeyStart = GetActualKey(from, from.LastSearchPositionOrLastEntry);

    to.EnsureHasSpaceFor(_tx, originalFromKeyStart, -1);

    var fromNode = from.GetNode(from.LastSearchPosition);
    long pageNum = fromNode->PageNumber;

    if (to.LastSearchPosition == 0)
    {
        // cannot add to left implicit side, adjust by moving the left node
        // to the right by one, then adding the new one as the left
        var implicitLeftKey = GetActualKey(to, 0);
        var leftPageNumber = to.GetNode(0)->PageNumber;
        // Insert a keyed copy of the implicit node at slot 1, insert the new
        // implicit node at slot 0, then drop the stale original (now at 1).
        to.AddPageRefNode(1, implicitLeftKey, leftPageNumber);
        to.AddPageRefNode(0, Slice.BeforeAllKeys, pageNum);
        to.RemoveNode(1);
    }
    else
    {
        to.AddPageRefNode(to.LastSearchPosition, originalFromKeyStart, pageNum);
    }

    if (from.LastSearchPositionOrLastEntry == 0)
    {
        // cannot just remove the left node, need to adjust those
        var rightPageNumber = from.GetNode(1)->PageNumber;
        from.RemoveNode(0); // remove the original implicit node
        from.RemoveNode(0); // remove the next node that we now turned into implicit
        from.EnsureHasSpaceFor(_tx, Slice.BeforeAllKeys, -1);
        from.AddPageRefNode(0, Slice.BeforeAllKeys, rightPageNumber);
        Debug.Assert(from.NumberOfEntries >= 2);
    }
    else
    {
        from.RemoveNode(from.LastSearchPositionOrLastEntry);
    }

    // Replace the separator in the parent so it reflects the new key ranges.
    var pos = parentPage.LastSearchPositionOrLastEntry;
    parentPage.RemoveNode(pos);

    var newKey = GetActualKey(to, 0); // get the next smallest key it has now
    var pageNumber = to.PageNumber;
    if (parentPage.GetNode(0)->PageNumber == to.PageNumber)
    {
        // 'to' is the parent's implicit left child; the separator describes 'from' instead.
        pageNumber = from.PageNumber;
        newKey = GetActualKey(from, 0);
    }

    parentPage.EnsureHasSpaceFor(_tx, newKey, -1);
    parentPage.AddPageRefNode(pos, newKey, pageNumber);
}
/// <summary>
/// Moves one leaf node from <paramref name="from"/> to <paramref name="to"/>
/// during rebalancing (copying its data bytes directly), then rewrites the
/// separator key in the parent.
/// </summary>
private void MoveLeafNode(Page parentPage, Page from, Page to)
{
    Debug.Assert(from.IsBranch == false);
    var originalFromKeyStart = GetActualKey(from, from.LastSearchPositionOrLastEntry);

    var fromNode = from.GetNode(from.LastSearchPosition);
    // Raw pointer to the node's value bytes inside the source page; must be
    // read before the node is removed from 'from'.
    byte* val = @from.Base + @from.KeysOffsets[@from.LastSearchPosition] + Constants.NodeHeaderSize + originalFromKeyStart.Size;

    var nodeVersion = fromNode->Version; // every time new node is allocated the version is increased, but in this case we do not want to increase it
    if (nodeVersion > 0)
        nodeVersion -= 1;

    var prefixedOriginalFromKey = to.PrepareKeyToInsert(originalFromKeyStart, to.LastSearchPosition);

    byte* dataPos;
    var fromDataSize = fromNode->DataSize;
    switch (fromNode->Flags)
    {
        case NodeFlags.PageRef:
            to.EnsureHasSpaceFor(_tx, prefixedOriginalFromKey, -1);
            dataPos = to.AddPageRefNode(to.LastSearchPosition, prefixedOriginalFromKey, fromNode->PageNumber);
            break;
        case NodeFlags.Data:
            to.EnsureHasSpaceFor(_tx, prefixedOriginalFromKey, fromDataSize);
            dataPos = to.AddDataNode(to.LastSearchPosition, prefixedOriginalFromKey, fromDataSize, nodeVersion);
            break;
        case NodeFlags.MultiValuePageRef:
            to.EnsureHasSpaceFor(_tx, prefixedOriginalFromKey, fromDataSize);
            dataPos = to.AddMultiValueNode(to.LastSearchPosition, prefixedOriginalFromKey, fromDataSize, nodeVersion);
            break;
        default:
            throw new NotSupportedException("Invalid node type to move: " + fromNode->Flags);
    }

    // Copy the payload bytes into the destination node's data area.
    if(dataPos != null && fromDataSize > 0)
        Memory.Copy(dataPos, val, fromDataSize);

    from.RemoveNode(from.LastSearchPositionOrLastEntry);

    // Replace the separator in the parent so it reflects the new key ranges.
    var pos = parentPage.LastSearchPositionOrLastEntry;
    parentPage.RemoveNode(pos);

    var newSeparatorKey = GetActualKey(to, 0); // get the next smallest key it has now
    var pageNumber = to.PageNumber;
    if (parentPage.GetNode(0)->PageNumber == to.PageNumber)
    {
        // 'to' is the parent's implicit left child; the separator describes 'from' instead.
        pageNumber = from.PageNumber;
        newSeparatorKey = GetActualKey(from, 0);
    }

    AddSeparatorToParentPage(parentPage, pageNumber, newSeparatorKey, pos);
}
/// <summary>
/// Moves the iterator to the previous entry in key order.
/// </summary>
/// <returns>true if positioned on a valid entry; false when iteration is
/// exhausted or the previous key fails validation.</returns>
public bool MovePrev()
{
    while (true)
    {
        _currentPage.LastSearchPosition--;
        if (_currentPage.LastSearchPosition >= 0)
        {
            // There is another entry on this page; if it is a branch entry,
            // descend right-most until we reach the leaf it leads to.
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tx.GetReadOnlyPage(node->PageNumber);
                _currentPage.LastSearchPosition = _currentPage.NumberOfEntries - 1;
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (this.ValidateCurrentKey(current) == false)
                return false;
            _currentKey.Set(current);
            return true; // positioned on the previous entry
        }
        // Ran out of entries on this page — pop back up to the parent and
        // continue from its previous slot.
        if (_cursor.PageCount == 0)
            break;
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return false;
}
/// <summary>
/// Moves one branch node from <paramref name="from"/> to <paramref name="to"/>
/// during rebalancing. This variant supports prefixed keys and uses
/// ChangeImplicitRefPageNode to rewire each page's implicit (empty-key)
/// left-most entry, then rewrites the separator key in the parent.
/// </summary>
private void MoveBranchNode(Page parentPage, Page from, Page to)
{
    Debug.Assert(from.IsBranch);

    var originalFromKey = to.PrepareKeyToInsert(GetActualKey(from, from.LastSearchPositionOrLastEntry), to.LastSearchPosition);

    to.EnsureHasSpaceFor(_tx, originalFromKey, -1);

    var fromNode = from.GetNode(from.LastSearchPosition);
    long pageNum = fromNode->PageNumber;

    if (to.LastSearchPosition == 0)
    {
        // cannot add to left implicit side, adjust by moving the left node
        // to the right by one, then adding the new one as the left
        NodeHeader* actualKeyNode;
        var implicitLeftKey = GetActualKey(to, 0, out actualKeyNode);
        var implicitLeftNode = to.GetNode(0);
        var leftPageNumber = implicitLeftNode->PageNumber;

        MemorySlice implicitLeftKeyToInsert;

        if (implicitLeftNode == actualKeyNode)
        {
            // no need to create a prefix, just use the existing prefixed key from the node
            // this also prevents from creating a prefix which is the full key given in 'implicitLeftKey'
            if (_tree.KeysPrefixing)
                implicitLeftKeyToInsert = new PrefixedSlice(actualKeyNode);
            else
                implicitLeftKeyToInsert = new Slice(actualKeyNode);
        }
        else
            implicitLeftKeyToInsert = to.PrepareKeyToInsert(implicitLeftKey, 1);

        to.EnsureHasSpaceFor(_tx, implicitLeftKeyToInsert, -1);
        to.AddPageRefNode(1, implicitLeftKeyToInsert, leftPageNumber);
        to.ChangeImplicitRefPageNode(pageNum); // setup the new implicit node
    }
    else
    {
        to.AddPageRefNode(to.LastSearchPosition, originalFromKey, pageNum);
    }

    if (from.LastSearchPositionOrLastEntry == 0)
    {
        // Removing the implicit left entry: drop it, then promote the next
        // node to be the new implicit reference.
        var rightPageNumber = from.GetNode(1)->PageNumber;
        from.RemoveNode(0); // remove the original implicit node
        from.ChangeImplicitRefPageNode(rightPageNumber); // setup the new implicit node
        Debug.Assert(from.NumberOfEntries >= 2);
    }
    else
    {
        from.RemoveNode(from.LastSearchPositionOrLastEntry);
    }

    // Replace the separator in the parent so it reflects the new key ranges.
    var pos = parentPage.LastSearchPositionOrLastEntry;
    parentPage.RemoveNode(pos);

    var newSeparatorKey = GetActualKey(to, 0); // get the next smallest key it has now
    var pageNumber = to.PageNumber;
    if (parentPage.GetNode(0)->PageNumber == to.PageNumber)
    {
        // 'to' is the parent's implicit left child; the separator describes 'from' instead.
        pageNumber = from.PageNumber;
        newSeparatorKey = GetActualKey(from, 0);
    }

    AddSeparatorToParentPage(parentPage, pageNumber, newSeparatorKey, pos);
}
/// <summary>
/// Grows the nested page of a multi-value tree entry: the current nested page
/// is snapshotted to a temporary buffer, the entry is re-allocated at
/// <paramref name="newSize"/>, all existing keys are replayed into the new
/// nested page, and finally <paramref name="value"/> is inserted.
/// </summary>
private void ExpandMultiTreeNestedPageSize(Transaction tx, Slice key, Slice value, byte* nestedPagePtr, ushort newSize, int currentSize)
{
    Debug.Assert(newSize > currentSize);
    TemporaryPage tmp;
    using (tx.Environment.GetTemporaryPage(tx, out tmp))
    {
        var tempPagePointer = tmp.TempPagePointer;
        // Snapshot the old nested page before releasing it — Delete below
        // invalidates nestedPagePtr.
        Memory.Copy(tempPagePointer, nestedPagePtr, currentSize);
        Delete(key); // release our current page
        Page nestedPage = new Page(tempPagePointer, "multi tree", (ushort)currentSize);

        // Re-allocate the entry at the larger size and format it as an empty
        // nested leaf page.
        var ptr = DirectAdd(key, newSize);

        var newNestedPage = new Page(ptr, "multi tree", newSize)
        {
            Lower = (ushort)Constants.PageHeaderSize,
            Upper = KeysPrefixing ? (ushort) (newSize - Constants.PrefixInfoSectionSize) : newSize,
            Flags = KeysPrefixing ? PageFlags.Leaf | PageFlags.KeysPrefixed : PageFlags.Leaf,
            PageNumber = -1L // mark as invalid page number
        };

        newNestedPage.ClearPrefixInfo();

        // Replay every existing entry into the new nested page, preserving
        // each node's version.
        MemorySlice nodeKey = nestedPage.CreateNewEmptyKey();
        for (int i = 0; i < nestedPage.NumberOfEntries; i++)
        {
            var nodeHeader = nestedPage.GetNode(i);
            nestedPage.SetNodeKey(nodeHeader, ref nodeKey);
            nodeKey = newNestedPage.PrepareKeyToInsert(nodeKey, i);
            newNestedPage.AddDataNode(i, nodeKey, 0,
                (ushort)(nodeHeader->Version - 1)); // we dec by one because AddDataNode will inc by one, and we don't want to change those values
        }

        // Finally insert the new value at its sorted position.
        newNestedPage.Search(value);
        newNestedPage.AddDataNode(newNestedPage.LastSearchPosition, newNestedPage.PrepareKeyToInsert(value, newNestedPage.LastSearchPosition), 0, 0);
    }
}
/// <summary>
/// Resolves the real key for the node at <paramref name="pos"/>. An empty key
/// marks an implicit branch reference, so we keep descending into the
/// referenced page's left-most node until a non-empty key is found.
/// </summary>
/// <param name="node">Receives the node whose key was ultimately used.</param>
private MemorySlice GetActualKey(Page page, int pos, out NodeHeader* node)
{
    for (node = page.GetNode(pos); ; )
    {
        var candidate = page.GetNodeKey(node);
        if (candidate.KeyLength != 0)
            return candidate;

        // Only branch pages carry implicit (empty) keys.
        Debug.Assert(page.IsBranch);

        // Follow the page reference and look at its left-most entry.
        page = _tx.GetReadOnlyPage(node->PageNumber);
        node = page.GetNode(0);
    }
}
/// <summary>
/// Collapses a branch page holding exactly one entry: the parent's reference
/// is redirected straight to the branch's only child, the cursor is fixed up
/// if it was sitting on the removed page, and the branch page is freed.
/// </summary>
private void RemoveBranchWithOneEntry(Page page, Page parentPage)
{
    Debug.Assert(page.NumberOfEntries == 1);

    // The single child that the soon-to-be-removed branch points at.
    var childPageNumber = page.GetNode(0)->PageNumber;

    // Find the parent entry that references the branch being removed.
    NodeHeader* parentRef = null;
    for (var slot = 0; slot < parentPage.NumberOfEntries; slot++)
    {
        parentRef = parentPage.GetNode(slot);
        if (parentRef->PageNumber == page.PageNumber)
            break;
    }

    Debug.Assert(parentRef->PageNumber == page.PageNumber);

    // Short-circuit the parent straight to the child, bypassing the branch.
    parentRef->PageNumber = childPageNumber;

    // Keep the cursor consistent: if it currently sits on the removed branch,
    // replace that stack entry with the child page.
    if (_cursor.CurrentPage.PageNumber == page.PageNumber)
    {
        _cursor.Pop();
        _cursor.Push(_tx.GetReadOnlyPage(childPageNumber));
    }

    _tx.FreePage(page.PageNumber);
}
/// <summary>
/// Splits _page roughly in half: picks a split index, publishes the separator
/// key to the parent, moves the upper entries (and any key prefixes) to
/// <paramref name="rightPage"/>, truncates _page, and inserts the pending new
/// key into whichever half it belongs to.
/// </summary>
/// <returns>Pointer to the newly inserted entry's data area.</returns>
private byte* SplitPageInHalf(Page rightPage)
{
    int currentIndex = _page.LastSearchPosition;
    bool newPosition = true;
    int splitIndex = _page.NumberOfEntries/2;
    if (currentIndex < splitIndex)
        newPosition = false;

    PrefixNode[] prefixes = null;

    if (_tree.KeysPrefixing && _page.HasPrefixes)
    {
        prefixes = _page.GetPrefixes();
    }

    // For leaves (and prefixed pages) the naive middle split can fail for
    // large entries — let AdjustSplitPosition pick a feasible index.
    if (_page.IsLeaf || prefixes != null)
    {
        splitIndex = AdjustSplitPosition(currentIndex, splitIndex, prefixes, ref newPosition);
    }

    var currentKey = _page.GetNodeKey(splitIndex);

    // here the current key is the separator key and can go either way, so
    // use newPosition to decide if it stays on the left node or moves to the right
    MemorySlice seperatorKey;
    if (currentIndex == splitIndex && newPosition)
    {
        seperatorKey = currentKey.Compare(_newKey) < 0 ? currentKey : _newKey;
    }
    else
    {
        seperatorKey = currentKey;
    }

    AddSeparatorToParentPage(rightPage.PageNumber, seperatorKey);

    MemorySlice instance = _page.CreateNewEmptyKey();

    // Replicate the key-prefix table onto the right page before moving entries.
    if (prefixes != null)
    {
        for (int i = 0; i < prefixes.Length; i++)
        {
            var prefix = prefixes[i];

            rightPage.WritePrefix(new Slice(prefix.ValuePtr, prefix.PrefixLength), i);
        }
    }

    // move the actual entries from page to right page
    ushort nKeys = _page.NumberOfEntries;
    for (int i = splitIndex; i < nKeys; i++)
    {
        NodeHeader* node = _page.GetNode(i);
        if (_page.IsBranch && rightPage.NumberOfEntries == 0)
        {
            // First entry on a branch page becomes the implicit left reference.
            rightPage.CopyNodeDataToEndOfPage(node, _tree.KeysPrefixing ? (MemorySlice) PrefixedSlice.BeforeAllKeys : Slice.BeforeAllKeys);
        }
        else
        {
            _page.SetNodeKey(node, ref instance);
            var key = rightPage.PrepareKeyToInsert(instance, rightPage.NumberOfEntries);

            rightPage.CopyNodeDataToEndOfPage(node, key);
        }
    }

    _page.Truncate(_tx, splitIndex);

    // actually insert the new key
    try
    {
        return (currentIndex > splitIndex || newPosition && currentIndex == splitIndex) ?
            InsertNewKey(rightPage) : InsertNewKey(_page);
    }
    catch (InvalidOperationException e)
    {
        // NOTE(review): message sniffing — only this specific "page full"
        // failure gets the detailed dump; everything else rethrows as-is.
        if (e.Message.StartsWith("The page is full and cannot add an entry"))
        {
            var debugInfo = new StringBuilder();

            debugInfo.AppendFormat("\r\n_tree.Name: {0}\r\n", _tree.Name);
            debugInfo.AppendFormat("_newKey: {0}, _len: {1}, needed space: {2}\r\n", _newKey, _len, _page.GetRequiredSpace(_newKey, _len));
            debugInfo.AppendFormat("currentKey: {0}, seperatorKey: {1}\r\n", currentKey, seperatorKey);
            debugInfo.AppendFormat("currentIndex: {0}\r\n", currentIndex);
            debugInfo.AppendFormat("splitIndex: {0}\r\n", splitIndex);
            debugInfo.AppendFormat("newPosition: {0}\r\n", newPosition);

            debugInfo.AppendFormat("_page info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", _page.Flags, _page.NumberOfEntries, _page.SizeLeft, _page.CalcSizeLeft());

            for (int i = 0; i < _page.NumberOfEntries; i++)
            {
                var node = _page.GetNode(i);
                var key = _page.GetNodeKey(node);
                debugInfo.AppendFormat("{0} - {2} {1}\r\n", key, node->DataSize, node->Flags == NodeFlags.Data ? "Size" : "Page");
            }

            debugInfo.AppendFormat("rightPage info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", rightPage.Flags, rightPage.NumberOfEntries, rightPage.SizeLeft, rightPage.CalcSizeLeft());

            for (int i = 0; i < rightPage.NumberOfEntries; i++)
            {
                var node = rightPage.GetNode(i);
                var key = rightPage.GetNodeKey(node);
                debugInfo.AppendFormat("{0} - {2} {1}\r\n", key, node->DataSize, node->Flags == NodeFlags.Data ? "Size" : "Page");
            }

            throw new InvalidOperationException(debugInfo.ToString(), e);
        }
        throw;
    }
}
/// <summary>
/// Builds a human-readable dump of the page-split state (tree name, pending
/// key, split decision, and every node on both pages) for diagnostics when a
/// split goes wrong.
/// </summary>
private string GatherDetailedDebugInfo(Page rightPage, MemorySlice currentKey, MemorySlice seperatorKey, int currentIndex, int splitIndex, bool toRight)
{
    var sb = new StringBuilder();

    // Overall split decision context.
    sb.AppendFormat("\r\n_tree.Name: {0}\r\n", _tree.Name);
    sb.AppendFormat("_newKey: {0}, _len: {1}, needed space: {2}\r\n", _newKey, _len, _page.GetRequiredSpace(_newKey, _len));
    sb.AppendFormat("key at LastSearchPosition: {0}, current key: {1}, seperatorKey: {2}\r\n", _page.GetNodeKey(_page.LastSearchPosition), currentKey, seperatorKey);
    sb.AppendFormat("currentIndex: {0}\r\n", currentIndex);
    sb.AppendFormat("splitIndex: {0}\r\n", splitIndex);
    sb.AppendFormat("toRight: {0}\r\n", toRight);

    // Left (original) page header and every node on it.
    sb.AppendFormat("_page info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", _page.Flags, _page.NumberOfEntries, _page.SizeLeft, _page.CalcSizeLeft());
    for (var idx = 0; idx < _page.NumberOfEntries; idx++)
    {
        var entry = _page.GetNode(idx);
        var entryKey = _page.GetNodeKey(entry);
        sb.AppendFormat("{0} - {2} {1}\r\n", entryKey, entry->DataSize, entry->Flags == NodeFlags.Data ? "Size" : "Page");
    }

    // Right (new) page header and every node on it.
    sb.AppendFormat("rightPage info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", rightPage.Flags, rightPage.NumberOfEntries, rightPage.SizeLeft, rightPage.CalcSizeLeft());
    for (var idx = 0; idx < rightPage.NumberOfEntries; idx++)
    {
        var entry = rightPage.GetNode(idx);
        var entryKey = rightPage.GetNodeKey(entry);
        sb.AppendFormat("{0} - {2} {1}\r\n", entryKey, entry->DataSize, entry->Flags == NodeFlags.Data ? "Size" : "Page");
    }

    return sb.ToString();
}
/// <summary>
/// Renders a branch node's key for display, truncated to 25 characters. The
/// left-most entry carries an implicit (empty) key, so it is rendered as
/// "(lt <next key>)" — i.e. "less than" the key of the following entry.
/// </summary>
private static unsafe string GetBranchNodeString(int i, Slice key, Page p, NodeHeader* node)
{
    if (i == 0 && key.Size == 0)
    {
        // Implicit left entry: borrow the next node's key for the label.
        key.Set(p.GetNode(1));
        return MaxString("(lt " + key + ")", 25);
    }

    key.Set(node);
    return MaxString(key.ToString(), 25);
}