// Counts the number of values stored under the given key of a multi-value tree.
// Returns 0 when the key is not found. Large multi entries live in a dedicated
// nested tree (MultiValuePageRef); small ones are embedded as a nested page.
public long MultiCount(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);
    if (page == null || page.LastMatch != 0)
    {
        return(0);
    }

    Debug.Assert(node != null);

    Slice fetchedNodeKey;
    using (TreeNodeHeader.ToSlicePtr(_llt.Allocator, node, out fetchedNodeKey))
    {
        // FindPageFor reported an exact match (LastMatch == 0), so the node's key
        // must equal the requested key — anything else indicates corruption.
        if (SliceComparer.Equals(fetchedNodeKey, key) == false)
        {
            VoronUnrecoverableErrorException.Raise(_llt.Environment, "Was unable to retrieve the correct node. Data corruption possible");
        }
    }

    if (node->Flags == TreeNodeFlags.MultiValuePageRef)
    {
        // Values overflowed into a full nested tree; its state tracks the entry count.
        var tree = OpenMultiValueTree(key, node);
        return(tree.State.NumberOfEntries);
    }

    // Values are embedded inline: interpret the node's data as a nested page.
    var nestedPage = new TreePage(DirectAccessFromHeader(node), (ushort)GetDataSize(node));
    return(nestedPage.NumberOfEntries);
}
// Returns the size in bytes of the value stored under the given key,
// or -1 when the key is not present (or the lookup cannot be verified).
public int GetDataSize(Slice key)
{
    TreeNodeHeader *node;
    var p = FindPageFor(key, out node);
    // FIX: also guard against a null page — the sibling read paths (ReadVersion,
    // the other GetDataSize revision) treat a null page as "not found", and
    // dereferencing p.LastMatch here would otherwise throw NullReferenceException.
    if (p == null || p.LastMatch != 0)
    {
        return(-1);
    }

    if (node == null)
    {
        return(-1);
    }

    Slice nodeKey;
    using (TreeNodeHeader.ToSlicePtr(_llt.Allocator, node, out nodeKey))
    {
        // Confirm the located node actually carries the requested key.
        if (!SliceComparer.EqualsInline(nodeKey, key))
        {
            return(-1);
        }
    }

    return(GetDataSize(node));
}
// Builds a human-readable dump of the page-split state (both pages, keys, and
// indexes) for inclusion in error messages when a split goes wrong.
// NOTE: "seperatorKey" spelling is preserved — parameter names are part of the
// callable signature (named arguments).
private string GatherDetailedDebugInfo(TreePage rightPage, Slice currentKey, Slice seperatorKey, int currentIndex, int splitIndex, bool toRight)
{
    var debugInfo = new StringBuilder();
    debugInfo.AppendFormat("\r\n_tree.Name: {0}\r\n", _tree.Name);
    debugInfo.AppendFormat("_newKey: {0}, _len: {1}, needed space: {2}\r\n", _newKey, _len, _page.GetRequiredSpace(_newKey, _len));
    debugInfo.AppendFormat("key at LastSearchPosition: {0}, current key: {1}, seperatorKey: {2}\r\n", _page.GetNodeKey(_tx, _page.LastSearchPosition), currentKey, seperatorKey);
    debugInfo.AppendFormat("currentIndex: {0}\r\n", currentIndex);
    debugInfo.AppendFormat("splitIndex: {0}\r\n", splitIndex);
    debugInfo.AppendFormat("toRight: {0}\r\n", toRight);

    debugInfo.AppendFormat("_page info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", _page.TreeFlags, _page.NumberOfEntries, _page.SizeLeft, _page.CalcSizeLeft());

    for (int i = 0; i < _page.NumberOfEntries; i++)
    {
        var node = _page.GetNode(i);
        var key = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node);
        // Argument order {0} - {2} {1} renders as "<key> - Size/Page <value>".
        debugInfo.AppendFormat("{0} - {2} {1}\r\n", key, node->DataSize, node->Flags == TreeNodeFlags.Data ? "Size" : "Page");
    }

    debugInfo.AppendFormat("rightPage info: flags - {0}, # of entries {1}, size left: {2}, calculated size left: {3}\r\n", rightPage.TreeFlags, rightPage.NumberOfEntries, rightPage.SizeLeft, rightPage.CalcSizeLeft());

    for (int i = 0; i < rightPage.NumberOfEntries; i++)
    {
        var node = rightPage.GetNode(i);
        var key = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node);
        debugInfo.AppendFormat("{0} - {2} {1}\r\n", key, node->DataSize, node->Flags == TreeNodeFlags.Data ? "Size" : "Page");
    }
    return(debugInfo.ToString());
}
//TODO: write a test for this
// Counts the number of values stored under the given key of a multi-value tree.
// Returns 0 when the key is not found.
public long MultiCount(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);
    if (page == null || page.LastMatch != 0)
    {
        return(0);
    }

    Debug.Assert(node != null);

    var fetchedNodeKey = TreeNodeHeader.ToSlicePtr(_llt.Allocator, node);
    // FindPageFor reported an exact match, so a mismatching key indicates corruption.
    if (SliceComparer.Equals(fetchedNodeKey, key) == false)
    {
        throw new InvalidDataException("Was unable to retrieve the correct node. Data corruption possible");
    }

    if (node->Flags == TreeNodeFlags.MultiValuePageRef)
    {
        // Values overflowed into a dedicated nested tree; its state tracks the count.
        var tree = OpenMultiValueTree(key, node);

        return(tree.State.NumberOfEntries);
    }

    // Values are embedded inline: interpret the node's data as a nested page.
    var nestedPage = new TreePage(TreeNodeHeader.DirectAccess(_llt, node), "multi tree", (ushort)TreeNodeHeader.GetDataSize(_llt, node));

    return(nestedPage.NumberOfEntries);
}
// Returns an iterator over all values stored under the given key of a
// multi-value tree. An EmptyIterator is returned when the key does not exist.
public IIterator MultiRead(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);
    if (page == null || page.LastMatch != 0)
    {
        return(new EmptyIterator());
    }

    Debug.Assert(node != null);

    Slice fetchedNodeKey;
    using (TreeNodeHeader.ToSlicePtr(_llt.Allocator, node, out fetchedNodeKey))
    {
        // FindPageFor reported an exact match, so a mismatching key indicates corruption.
        if (SliceComparer.Equals(fetchedNodeKey, key) == false)
        {
            VoronUnrecoverableErrorException.Raise(_llt, "Was unable to retrieve the correct node. Data corruption possible");
        }
    }

    if (node->Flags == TreeNodeFlags.MultiValuePageRef)
    {
        // Values live in a dedicated nested tree; iterate it directly.
        var tree = OpenMultiValueTree(key, node);

        return(tree.Iterate(true));
    }

    // Values are embedded inline: expose the node's data as a nested page.
    var ptr = DirectAccessFromHeader(node);
    var nestedPage = new TreePage(ptr, (ushort)GetDataSize(node));

    return(new TreePageIterator(_llt, key, this, nestedPage));
}
// Returns an iterator over the values stored under the given key of a
// multi-value tree; an EmptyIterator when the key does not exist.
public IIterator MultiRead(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);
    if (page == null || page.LastMatch != 0)
    {
        return(new EmptyIterator());
    }

    Debug.Assert(node != null);

    var fetchedNodeKey = TreeNodeHeader.ToSlicePtr(_llt.Allocator, node);
    // FindPageFor reported an exact match, so a mismatching key indicates corruption.
    if (SliceComparer.Equals(fetchedNodeKey, key) == false)
    {
        throw new InvalidDataException("Was unable to retrieve the correct node. Data corruption possible");
    }

    if (node->Flags == TreeNodeFlags.MultiValuePageRef)
    {
        // Values live in a dedicated nested tree; iterate it directly.
        var tree = OpenMultiValueTree(key, node);

        return(tree.Iterate(false));
    }

    // Values are embedded inline: expose the node's data as a nested page.
    var ptr = TreeNodeHeader.DirectAccess(_llt, node);
    var dataSize = (ushort)TreeNodeHeader.GetDataSize(_llt, node);
    var nestedPage = new TreePage(ptr, "multi tree", dataSize);

    return(new TreePageIterator(_llt, key, this, nestedPage));
}
// Moves the iterator to the previous entry in key order, handling compressed
// pages. Returns false when the beginning of the tree is reached or the
// current key fails validation.
public bool MovePrev()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }
    while (true)
    {
        _currentPage.LastSearchPosition--;
        if (_currentPage.LastSearchPosition >= 0)
        {
            // There is still an entry on this page; if it is a branch page,
            // descend along the right-most path until we reach a leaf entry.
            while (_currentPage.IsBranch)
            {
                // In here we will also have the 'current' page (even if we are traversing a compressed node).
                if (_prefetch)
                {
                    MaybePrefetchPagesReferencedBy(_currentPage);
                }

                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tree.GetReadOnlyTreePage(node->PageNumber);
                if (_currentPage.IsCompressed)
                {
                    DecompressedCurrentPage();
                }
                _currentPage.LastSearchPosition = _currentPage.NumberOfEntries - 1;
            }

            // We should be prefetching data pages down here.
            if (_prefetch)
            {
                MaybePrefetchPagesReferencedBy(_currentPage);
            }

            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (DoRequireValidation && this.ValidateCurrentKey(_tx, current) == false)
            {
                return(false);
            }

            // Release the previous key's allocation before capturing the new one.
            _prevKeyScope.Dispose();
            _prevKeyScope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, current, out _currentInternalKey);
            _currentKey = _currentInternalKey;
            return(true);// there is another entry in this page
        }

        // Ran out of entries on this page; pop back to the parent and continue.
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return(false);
}
// Positions the iterator at the given key, or at the first entry greater than
// it when the exact key is absent. Returns false when no suitable entry exists.
public bool Seek(Slice key)
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }

    TreeNodeHeader * node;
    Func <Slice, TreeCursor> constructor;
    _currentPage = _tree.FindPageFor(key, node: out node, cursor: out constructor, allowCompressed: false);
    _cursor = constructor(key);
    // The cursor includes the leaf page itself; pop it since _currentPage already holds it.
    _cursor.Pop();

    if (node != null)
    {
        // Release the previously held key memory before capturing the new one.
        _prevKeyScope.Dispose();
        _prevKeyScope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out _currentInternalKey);
        _currentKey = _currentInternalKey;
        if (DoRequireValidation)
        {
            return(this.ValidateCurrentKey(_tx, Current));
        }
        return(true);
    }

    // The key is not found in the db, but we are Seek()ing for equals or starts with.
    // We know that the exact value isn't there, but it is possible that the next page has values
    // that is actually greater than the key, so we need to check it as well.
    _currentPage.LastSearchPosition = _currentPage.NumberOfEntries; // force next MoveNext to move to the next _page_.
    return(MoveNext());
}
// Returns the data size (in bytes) of the entry the iterator currently points at.
// Throws ObjectDisposedException when the iterator has already been disposed.
public int GetCurrentDataSize()
{
    // Reading from a disposed iterator is a programming error — fail loudly.
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }

    var currentNode = Current;
    return TreeNodeHeader.GetDataSize(_tx, currentNode);
}
// Collects the page numbers of every page owned by this tree: branch and leaf
// pages, overflow pages, nested multi-value trees, and embedded fixed size trees.
public List <long> AllPages()
{
    var results = new List <long>();
    var stack = new Stack <TreePage>();
    var root = _llt.GetReadOnlyTreePage(State.RootPageNumber);
    stack.Push(root);

    Slice key = default(Slice);

    // Depth-first traversal of all tree pages starting at the root.
    while (stack.Count > 0)
    {
        var p = stack.Pop();
        results.Add(p.PageNumber);
        for (int i = 0; i < p.NumberOfEntries; i++)
        {
            var node = p.GetNode(i);
            var pageNumber = node->PageNumber;
            if (p.IsBranch)
            {
                stack.Push(_llt.GetReadOnlyTreePage(pageNumber));
            }
            else if (node->Flags == TreeNodeFlags.PageRef)
            {
                // This is an overflow page
                var overflowPage = _llt.GetReadOnlyTreePage(pageNumber);
                var numberOfPages = _llt.DataPager.GetNumberOfOverflowPages(overflowPage.OverflowSize);
                for (long j = 0; j < numberOfPages; ++j)
                {
                    results.Add(overflowPage.PageNumber + j);
                }
            }
            else if (node->Flags == TreeNodeFlags.MultiValuePageRef)
            {
                // Recurse into the nested multi-value tree rooted at this node.
                key = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, ByteStringType.Mutable);
                var tree = OpenMultiValueTree(key, node);
                results.AddRange(tree.AllPages());
            }
            else
            {
                if ((State.Flags & TreeFlags.FixedSizeTrees) == TreeFlags.FixedSizeTrees)
                {
                    // Leaf values of a FixedSizeTrees-flagged tree are embedded
                    // fixed size tree headers; include their pages as well.
                    var valueReader = TreeNodeHeader.Reader(_llt, node);
                    var valueSize = ((FixedSizeTreeHeader.Embedded *)valueReader.Base)->ValueSize;
                    var fixedSizeTreeName = p.GetNodeKey(_llt, i);
                    var fixedSizeTree = new FixedSizeTree(_llt, this, fixedSizeTreeName, valueSize);
                    var pages = fixedSizeTree.AllPages();
                    results.AddRange(pages);
                }
            }
        }
    }
    return(results);
}
// Materializes the key of the node at nodeNumber into `result`.
// External keys alias the page memory directly (no copy); otherwise a copy of
// the key bytes is made. The returned scope owns the key's lifetime.
public ByteStringContext.Scope GetNodeKey(LowLevelTransaction tx, int nodeNumber, ByteStringType type /* = ByteStringType.Mutable | ByteStringType.External*/, out Slice result)
{
    var node = GetNode(nodeNumber);

    // Pointer aliasing is only valid for external byte strings.
    if ((type & ByteStringType.External) != 0)
        return TreeNodeHeader.ToSlicePtr(tx.Allocator, node, type, out result);

    // Non-external: a private copy must be created.
    return TreeNodeHeader.ToSlice(tx.Allocator, node, type, out result);
}
// Renders every node on this page as "<node>, " — intended for debugging only.
public string Dump()
{
    var builder = new StringBuilder();

    for (var index = 0; index < NumberOfEntries; index++)
    {
        var header = GetNode(index);
        builder.Append(TreeNodeHeader.ToDebugString(header));
        builder.Append(", ");
    }

    return builder.ToString();
}
// Reads the value stored under the given key.
// Returns null when the key is not present in the tree.
public ReadResult Read(Slice key)
{
    TreeNodeHeader *node;
    var p = FindPageFor(key, out node);

    // FIX: also treat a null page as "not found" — other read paths
    // (ReadVersion, GetDataSize) guard against FindPageFor returning null,
    // and dereferencing p.LastMatch here would throw NullReferenceException.
    if (p == null || p.LastMatch != 0)
    {
        return(null);
    }

    return(new ReadResult(TreeNodeHeader.Reader(_llt, node), node->Version));
}
// Optimization for sequential inserts: when a split happens at the very end of
// a page, keep the current page intact and place only the new value on the
// fresh right page, maximizing density instead of moving half the entries.
private byte *OptimizedOnlyMoveNewValueToTheRightPage(TreePage rightPage)
{
    // when we get a split at the end of the page, we take that as a hint that the user is doing
    // sequential inserts, at that point, we are going to keep the current page as is and create a new
    // page, this will allow us to do minimal amount of work to get the best density
    TreePage branchOfSeparator;

    byte *pos;
    if (_page.IsBranch)
    {
        if (_page.NumberOfEntries > 2)
        {
            // here we steal the last entry from the current page so we maintain the implicit null left entry
            TreeNodeHeader *node = _page.GetNode(_page.NumberOfEntries - 1);
            Debug.Assert(node->Flags == TreeNodeFlags.PageRef);
            rightPage.AddPageRefNode(0, Slices.BeforeAllKeys, node->PageNumber);
            pos = AddNodeToPage(rightPage, 1);

            // The stolen node's key becomes the separator in the parent page.
            Slice separatorKey;
            using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out separatorKey))
            {
                AddSeparatorToParentPage(rightPage.PageNumber, separatorKey, out branchOfSeparator);
            }

            _page.RemoveNode(_page.NumberOfEntries - 1);
        }
        else
        {
            // Too few entries to steal one: abandon the split and register the
            // new key directly with the parent page instead.
            _tree.FreePage(rightPage); // return the unnecessary right page
            pos = AddSeparatorToParentPage(_pageNumber, _newKey, out branchOfSeparator);

            if (_cursor.CurrentPage.PageNumber != branchOfSeparator.PageNumber)
            {
                _cursor.Push(branchOfSeparator);
            }

            return(pos);
        }
    }
    else
    {
        // Leaf split: the new key itself is the separator; the new value goes
        // to position 0 of the right page.
        AddSeparatorToParentPage(rightPage.PageNumber, _newKey, out branchOfSeparator);
        pos = AddNodeToPage(rightPage, 0);
    }
    _cursor.Push(rightPage);
    return(pos);
}
// Refreshes the nested-page view after operations that may relocate nodes.
// HasSpaceFor can trigger an internal Defrag, which moves the node we hold a
// raw pointer to; when that happens the nested page pointer must be re-read.
private void EnsureNestedPagePointer(TreePage page, TreeNodeHeader *currentItem, ref TreePage nestedPage, ref byte *nestedPagePtr)
{
    var relocated = page.GetNode(page.LastSearchPosition);

    // Fast path: the node did not move, nothing to refresh.
    if (relocated == currentItem)
    {
        return;
    }

    nestedPagePtr = TreeNodeHeader.DirectAccess(_llt, relocated);
    var dataSize = (ushort)TreeNodeHeader.GetDataSize(_llt, relocated);
    nestedPage = new TreePage(nestedPagePtr, "multi tree", dataSize);
}
// Resolves the effective key for a node: the left-most entry of a branch page
// carries an implicit (empty) key, so we descend to the first child repeatedly
// until a real key is found. `node` is updated to the node the key came from.
private Slice GetActualKey(TreePage page, int pos, out TreeNodeHeader *node)
{
    node = page.GetNode(pos);
    var key = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node);
    while (key.Size == 0)
    {
        // Only the implicit left-most entry of a branch page may have an empty key.
        Debug.Assert(page.IsBranch);
        page = _tx.GetReadOnlyTreePage(node->PageNumber);
        node = page.GetNode(0);
        key = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node);
    }

    return(key);
}
// Resolves the effective key for a node: the left-most entry of a branch page
// carries an implicit (empty) key, so we descend to the first child repeatedly
// until a real key is found. The returned scope owns the key's memory and must
// be disposed by the caller.
private ByteStringContext.ExternalScope GetActualKey(TreePage page, int pos, out TreeNodeHeader *node, out Slice key)
{
    node = page.GetNode(pos);
    var scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);
    while (key.Size == 0)
    {
        // Only the implicit left-most entry of a branch page may have an empty key.
        Debug.Assert(page.IsBranch);
        page = _tree.GetReadOnlyTreePage(node->PageNumber);
        node = page.GetNode(0);
        // Release the previous (empty) key allocation before taking a new one.
        scope.Dispose();
        scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);
    }

    return(scope);
}
// Moves the iterator to the previous entry in key order.
// Returns false when the beginning of the tree is reached or the current key
// fails validation.
public bool MovePrev()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }
    while (true)
    {
        _currentPage.LastSearchPosition--;
        if (_currentPage.LastSearchPosition >= 0)
        {
            // There is still an entry on this page; if it is a branch page,
            // descend along the right-most path until we reach a leaf entry.
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tree.GetReadOnlyTreePage(node->PageNumber);
                _currentPage.LastSearchPosition = _currentPage.NumberOfEntries - 1;

                if (_prefetch && _currentPage.IsLeaf)
                {
                    MaybePrefetchOverflowPages(_currentPage);
                }
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (DoRequireValidation && this.ValidateCurrentKey(_tx, current) == false)
            {
                return(false);
            }

            // Release the previous key's allocation before capturing the new one.
            _prevKeyScope.Dispose();
            _prevKeyScope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, current, out _currentInternalKey);
            _currentKey = _currentInternalKey;
            return(true);// there is another entry in this page
        }

        // Ran out of entries on this page; pop back to the parent and continue.
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return(false);
}
// Returns the version of the entry stored under the given key,
// or 0 when the key is not present.
public ushort ReadVersion(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);

    // Not found (or page lookup failed entirely).
    if (page == null || page.LastMatch != 0)
        return 0;

    if (node == null)
        return 0;

    // Verify the located node actually carries the requested key.
    var storedKey = TreeNodeHeader.ToSlicePtr(_llt.Allocator, node);
    if (SliceComparer.EqualsInline(storedKey, key) == false)
        return 0;

    return node->Version;
}
// Returns the size in bytes of the value stored under the given key,
// or -1 when the key is not present.
public int GetDataSize(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);

    // Not found (or page lookup failed entirely).
    if (page == null || page.LastMatch != 0)
        return -1;

    if (node == null)
        return -1;

    // Verify the located node actually carries the requested key.
    var storedKey = TreeNodeHeader.ToSlicePtr(_llt.Allocator, node);
    if (SliceComparer.EqualsInline(storedKey, key) == false)
        return -1;

    return TreeNodeHeader.GetDataSize(_llt, node);
}
// Resolves the effective key for a node, compression-aware: the left-most
// entry of a branch page carries an implicit (empty) key, so we descend to the
// first child until a real key is found. Compressed children are decompressed;
// the returned ActualKeyScope owns both the key memory and any decompressed
// page, and must be disposed by the caller.
private ActualKeyScope GetActualKey(TreePage page, int pos, out TreeNodeHeader *node, out Slice key)
{
    DecompressedLeafPage decompressedLeafPage = null;
    node = page.GetNode(pos);
    var scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);
    while (key.Size == 0)
    {
        // Only the implicit left-most entry of a branch page may have an empty key.
        Debug.Assert(page.IsBranch);
        page = _tree.GetReadOnlyTreePage(node->PageNumber);
        if (page.IsCompressed == false)
        {
            node = page.GetNode(0);
        }
        else
        {
            // Dispose any page decompressed in a previous loop iteration.
            decompressedLeafPage?.Dispose();
            decompressedLeafPage = _tree.DecompressPage(page, skipCache: true);

            if (decompressedLeafPage.NumberOfEntries > 0)
            {
                node = decompressedLeafPage.GetNode(0);
            }
            else
            {
                // we have empty page after decompression (each compressed entry has a corresponding CompressionTombstone)
                // we can safely use the node key of first tombstone (they have proper order)
                node = page.GetNode(0);
            }
        }

        // Release the previous (empty) key allocation before taking a new one.
        scope.Dispose();
        scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);
    }

    return(new ActualKeyScope
    {
        DecompressedLeafPage = decompressedLeafPage,
        ExternalScope = scope
    });
}
// Checks the node's key against the iterator's optional constraints:
// it must start with RequiredPrefix (when set) and be strictly below MaxKey
// (when set). The key slice is only materialized when a constraint is present.
public unsafe static bool ValidateCurrentKey <T>(this T self, LowLevelTransaction tx, TreeNodeHeader *node) where T : IIterator
{
    if (self.RequiredPrefix.HasValue)
    {
        var candidate = TreeNodeHeader.ToSlicePtr(tx.Allocator, node);
        if (!SliceComparer.StartWith(candidate, self.RequiredPrefix))
            return false;
    }

    if (self.MaxKey.HasValue)
    {
        var candidate = TreeNodeHeader.ToSlicePtr(tx.Allocator, node);
        if (SliceComparer.CompareInline(candidate, self.MaxKey) >= 0)
            return false;
    }

    return true;
}
// Advances the iterator to the next entry in key order.
// Returns false when the end of the tree is reached or the current key fails
// validation.
public bool MoveNext()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }
    while (_currentPage != null)
    {
        _currentPage.LastSearchPosition++;
        if (_currentPage.LastSearchPosition < _currentPage.NumberOfEntries)
        {
            // There is still an entry on this page; if it is a branch page,
            // descend along the left-most path until we reach a leaf entry.
            while (_currentPage.IsBranch)
            {
                _cursor.Push(_currentPage);
                var node = _currentPage.GetNode(_currentPage.LastSearchPosition);
                _currentPage = _tx.GetReadOnlyTreePage(node->PageNumber);

                _currentPage.LastSearchPosition = 0;
            }
            var current = _currentPage.GetNode(_currentPage.LastSearchPosition);
            if (DoRequireValidation && this.ValidateCurrentKey(_tx, current) == false)
            {
                return(false);
            }
            _currentInternalKey = TreeNodeHeader.ToSlicePtr(_tx.Allocator, current, ByteStringType.Mutable);
            _currentKey = _currentInternalKey;
            return(true);// there is another entry in this page
        }

        // Ran out of entries on this page; pop back to the parent and continue.
        if (_cursor.PageCount == 0)
        {
            break;
        }
        _currentPage = _cursor.Pop();
    }
    _currentPage = null;
    return(false);
}
// Truncates the page so only the first i entries remain. Surviving entries are
// copied through a temporary page, which also compacts the page data.
public void Truncate(LowLevelTransaction tx, int i)
{
    if (i >= NumberOfEntries)
    {
        return;
    }

    // when truncating, we copy the values to a tmp page
    // this has the effect of compacting the page data and avoiding
    // internal page fragmentation
    TemporaryPage tmp;
    // Oversized (decompression) pages need a buffer larger than the standard
    // temporary page, so they come from the decompression buffer pool instead.
    using (PageSize <= tx.Environment.Options.PageSize
               ? tx.Environment.GetTemporaryPage(tx, out tmp)
               : tx.Environment.DecompressionBuffers.GetTemporaryPage(tx, PageSize, out tmp))
    {
        var copy = tmp.GetTempPage();
        copy.TreeFlags = TreeFlags;

        var slice = default(Slice);
        for (int j = 0; j < i; j++)
        {
            var node = GetNode(j);
            using (TreeNodeHeader.ToSlicePtr(tx.Allocator, node, out slice))
                copy.CopyNodeDataToEndOfPage(node, slice);
        }

        // Copy the compacted content back over this page (header excluded).
        Memory.Copy(Base + Constants.Tree.PageHeaderSize, copy.Base + Constants.Tree.PageHeaderSize, PageSize - Constants.Tree.PageHeaderSize);

        Upper = copy.Upper;
        Lower = copy.Lower;
    }

    // Keep the search position within the new bounds.
    if (LastSearchPosition > i)
    {
        LastSearchPosition = i;
    }
}
// Grows the nested page that stores a multi-value entry: copies the current
// nested page aside, deletes the old entry, re-adds it with the larger size,
// replays all existing values, then inserts the new value in sorted position.
private void ExpandMultiTreeNestedPageSize(Slice key, Slice value, byte *nestedPagePtr, ushort newSize, int currentSize)
{
    Debug.Assert(newSize > currentSize);
    TemporaryPage tmp;
    using (_llt.Environment.GetTemporaryPage(_llt, out tmp))
    {
        var tempPagePointer = tmp.TempPagePointer;
        // Snapshot the existing nested page before deleting the entry that owns it.
        Memory.Copy(tempPagePointer, nestedPagePtr, currentSize);
        Delete(key); // release our current page
        TreePage nestedPage = new TreePage(tempPagePointer, (ushort)currentSize);

        byte *ptr;
        using (DirectAdd(key, newSize, out ptr))
        {
            // Initialize the larger replacement nested page in-place.
            var newNestedPage = new TreePage(ptr, newSize)
            {
                Lower = (ushort)Constants.Tree.PageHeaderSize,
                Upper = newSize,
                TreeFlags = TreePageFlags.Leaf,
                PageNumber = -1L, // mark as invalid page number
                Flags = 0
            };

            ByteStringContext allocator = _llt.Allocator;
            // Replay the old entries (keys only, zero-length data) in order.
            for (int i = 0; i < nestedPage.NumberOfEntries; i++)
            {
                var nodeHeader = nestedPage.GetNode(i);

                Slice nodeKey;
                using (TreeNodeHeader.ToSlicePtr(allocator, nodeHeader, out nodeKey))
                    newNestedPage.AddDataNode(i, nodeKey, 0);
            }

            // Locate the sorted insert position for the new value and add it.
            newNestedPage.Search(_llt, value);
            newNestedPage.AddDataNode(newNestedPage.LastSearchPosition, value, 0);
        }
    }
}
// Grows the nested page that stores a multi-value entry: copies the current
// nested page aside, deletes the old entry, re-adds it with the larger size,
// replays all existing values, then inserts the new value in sorted position.
private void ExpandMultiTreeNestedPageSize(Slice key, Slice value, byte *nestedPagePtr, ushort newSize, int currentSize)
{
    Debug.Assert(newSize > currentSize);
    TemporaryPage tmp;
    using (_llt.Environment.GetTemporaryPage(_llt, out tmp))
    {
        var tempPagePointer = tmp.TempPagePointer;
        // Snapshot the existing nested page before deleting the entry that owns it.
        Memory.Copy(tempPagePointer, nestedPagePtr, currentSize);
        Delete(key); // release our current page
        TreePage nestedPage = new TreePage(tempPagePointer, "multi tree", (ushort)currentSize);

        var ptr = DirectAdd(key, newSize);
        // Initialize the larger replacement nested page in-place.
        var newNestedPage = new TreePage(ptr, "multi tree", newSize)
        {
            Lower = (ushort)Constants.TreePageHeaderSize,
            Upper = newSize,
            TreeFlags = TreePageFlags.Leaf,
            PageNumber = -1L // mark as invalid page number
        };

        ByteStringContext allocator = _llt.Allocator;
        for (int i = 0; i < nestedPage.NumberOfEntries; i++)
        {
            var nodeHeader = nestedPage.GetNode(i);
            Slice nodeKey = TreeNodeHeader.ToSlicePtr(allocator, nodeHeader);
            // we dec by one because AddDataNode will inc by one, and we don't want to change those values
            newNestedPage.AddDataNode(i, nodeKey, 0, (ushort)(nodeHeader->Version - 1));
            nodeKey.Release(allocator);
        }

        // Locate the sorted insert position for the new value and add it.
        newNestedPage.Search(_llt, value);
        newNestedPage.AddDataNode(newNestedPage.LastSearchPosition, value, 0, 0);
    }
}
// Captures the current page position as the iterator's current key.
// Returns false when the position is out of range or the key fails validation.
private bool TrySetPosition()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("PageIterator");
    }

    // The search position must point at an existing entry on the page.
    var position = _page.LastSearchPosition;
    if (position < 0 || position >= _page.NumberOfEntries)
    {
        return false;
    }

    var current = _page.GetNode(position);
    if (DoRequireValidation && this.ValidateCurrentKey(_tx, current) == false)
    {
        return false;
    }

    _currentInternalKey = TreeNodeHeader.ToSlicePtr(_tx.Allocator, current);
    _currentKey = _currentInternalKey;
    return true;
}
// Positions the page iterator at the given key.
// Returns false when the key cannot be located or fails validation.
public bool Seek(Slice key)
{
    if (_disposed)
    {
        throw new ObjectDisposedException("PageIterator");
    }

    var found = _page.Search(_tx, key);
    if (found == null)
    {
        return false;
    }

    _currentInternalKey = TreeNodeHeader.ToSlicePtr(_tx.Allocator, found);
    _currentKey = _currentInternalKey;

    // Prefix / max-key bounds are only checked when validation is required.
    return DoRequireValidation == false || this.ValidateCurrentKey(_tx, found);
}
// Positions the iterator at the given key, or at the first entry greater than
// it when the exact key is absent. Returns false when no suitable entry exists.
public bool Seek(Slice key)
{
    if (_disposed)
    {
        throw new ObjectDisposedException("TreeIterator " + _tree.Name);
    }

    TreeNodeHeader * node;
    Func <TreeCursor> constructor;
    _currentPage = _tree.FindPageFor(key, out node, out constructor);
    _cursor = constructor();
    // The cursor includes the leaf page itself; pop it since _currentPage already holds it.
    _cursor.Pop();

    if (node != null)
    {
        _currentInternalKey = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, ByteStringType.Mutable);
        _currentKey = _currentInternalKey;
        // TODO: Check here if aliasing via pointer is the intended use.
        if (DoRequireValidation)
        {
            return(this.ValidateCurrentKey(_tx, Current));
        }
        else
        {
            return(true);
        }
    }

    // The key is not found in the db, but we are Seek()ing for equals or starts with.
    // We know that the exact value isn't there, but it is possible that the next page has values
    // that is actually greater than the key, so we need to check it as well.
    _currentPage.LastSearchPosition = _currentPage.NumberOfEntries; // force next MoveNext to move to the next _page_.
    return(MoveNext());
}
// Merges the uncompressed nodes of page `p` into `decompressedPage`, honoring
// compression tombstones (deletions) and replacing existing entries on key
// collision. Throws when the decompressed page runs out of space.
// FIX: corrected the typo in the default-case error message ("copye" -> "copy").
private void HandleUncompressedNodes(DecompressedLeafPage decompressedPage, TreePage p, DecompressionUsage usage)
{
    int numberOfEntries = p.NumberOfEntries;
    for (var i = 0; i < numberOfEntries; i++)
    {
        var uncompressedNode = p.GetNode(i);
        Slice nodeKey;
        using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, uncompressedNode, out nodeKey))
        {
            // A tombstone marks a deletion of a previously compressed entry.
            if (uncompressedNode->Flags == TreeNodeFlags.CompressionTombstone)
            {
                HandleTombstone(decompressedPage, nodeKey, usage);
                continue;
            }
            if (decompressedPage.HasSpaceFor(_llt, TreeSizeOf.NodeEntry(uncompressedNode)) == false)
            {
                throw new InvalidOperationException("Could not add uncompressed node to decompressed page");
            }
            int index;
            if (decompressedPage.NumberOfEntries > 0)
            {
                Slice lastKey;
                using (decompressedPage.GetNodeKey(_llt, decompressedPage.NumberOfEntries - 1, out lastKey))
                {
                    // optimization: it's very likely that uncompressed nodes have greater keys than compressed ones
                    // when we insert sequential keys
                    var cmp = SliceComparer.CompareInline(nodeKey, lastKey);
                    if (cmp > 0)
                    {
                        // Strictly greater than the last key: append at the end.
                        index = decompressedPage.NumberOfEntries;
                    }
                    else
                    {
                        if (cmp == 0)
                        {
                            // update of the last entry, just decrement NumberOfEntries in the page and
                            // put it at the last position
                            index = decompressedPage.NumberOfEntries - 1;
                            decompressedPage.Lower -= Constants.Tree.NodeOffsetSize;
                        }
                        else
                        {
                            // General case: binary-search the insert position;
                            // an exact match means we replace the old entry.
                            index = decompressedPage.NodePositionFor(_llt, nodeKey);
                            if (decompressedPage.LastMatch == 0) // update
                            {
                                decompressedPage.RemoveNode(index);
                                if (usage == DecompressionUsage.Write)
                                {
                                    State.NumberOfEntries--;
                                }
                            }
                        }
                    }
                }
            }
            else
            {
                // all uncompressed nodes were compression tombstones which deleted all entries from the decompressed page
                index = 0;
            }
            switch (uncompressedNode->Flags)
            {
                case TreeNodeFlags.PageRef:
                    decompressedPage.AddPageRefNode(index, nodeKey, uncompressedNode->PageNumber);
                    break;
                case TreeNodeFlags.Data:
                    var pos = decompressedPage.AddDataNode(index, nodeKey, uncompressedNode->DataSize);
                    var nodeValue = TreeNodeHeader.Reader(_llt, uncompressedNode);
                    Memory.Copy(pos, nodeValue.Base, nodeValue.Length);
                    break;
                case TreeNodeFlags.MultiValuePageRef:
                    throw new NotSupportedException("Multi trees do not support compression");
                default:
                    throw new NotSupportedException("Invalid node type to copy: " + uncompressedNode->Flags);
            }
        }
    }
}