Example #1
        private DecompressedLeafPage ReuseCachedPage(DecompressedLeafPage cached, DecompressionUsage usage, ref DecompressionInput input)
        {
            DecompressedLeafPage result;

            var sizeDiff = input.DecompressedPageSize - cached.PageSize;

            if (sizeDiff > 0)
            {
                result = _llt.Environment.DecompressionBuffers.GetPage(_llt, input.DecompressedPageSize, usage, input.Page);

                Memory.Copy(result.Base, cached.Base, cached.Lower);
                Memory.Copy(result.Base + cached.Upper + sizeDiff,
                            cached.Base + cached.Upper,
                            cached.PageSize - cached.Upper);

                result.Upper += (ushort)sizeDiff;

                for (var i = 0; i < result.NumberOfEntries; i++)
                {
                    result.KeysOffsets[i] += (ushort)sizeDiff;
                }
            }
            else
            {
                result = cached;
            }

            return result;
        }
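
A note on the path above: the cached page can only be reused directly when it is at least as large as the requested decompressed size; otherwise a bigger page is allocated and both regions of the slotted page are copied over, shifting the upper (data) region and every key offset by the size difference. The following stand-alone sketch shows the same shift on plain byte arrays; the names and the (lower/upper/offsets) layout are simplifications, not the Voron page format.

        using System;

        static class SlottedPageGrowSketch
        {
            // Hypothetical layout: [0..lower) holds the header and the slot offsets,
            // [upper..page.Length) holds the node data, offsets point into that region.
            public static (byte[] Page, int Lower, int Upper, ushort[] Offsets) Grow(
                byte[] page, int lower, int upper, ushort[] offsets, int newSize)
            {
                var sizeDiff = newSize - page.Length;
                if (sizeDiff <= 0)
                    return (page, lower, upper, offsets); // the existing page is already big enough

                var grown = new byte[newSize];

                // copy the header and the slot array as-is
                Buffer.BlockCopy(page, 0, grown, 0, lower);

                // copy the data region to the end of the larger page
                Buffer.BlockCopy(page, upper, grown, upper + sizeDiff, page.Length - upper);

                // every data offset moved up by sizeDiff
                var shifted = new ushort[offsets.Length];
                for (var i = 0; i < offsets.Length; i++)
                    shifted[i] = (ushort)(offsets[i] + sizeDiff);

                return (grown, lower, upper + sizeDiff, shifted);
            }
        }
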
Example #2
        public byte* Execute()
        {
            using (DisableFreeSpaceUsageIfSplittingRootTree())
            {
                TreePage rightPage = _tree.NewPage(_page.TreeFlags, 1);

                if (_cursor.PageCount == 0) // we need to do a root split
                {
                    TreePage newRootPage = _tree.NewPage(TreePageFlags.Branch, 1);
                    _cursor.Push(newRootPage);
                    _tree.State.RootPageNumber = newRootPage.PageNumber;
                    _tree.State.Depth++;

                    // now add implicit left page
                    newRootPage.AddPageRefNode(0, Slices.BeforeAllKeys, _page.PageNumber);
                    _parentPage = newRootPage;
                    _parentPage.LastSearchPosition++;
                }
                else
                {
                    // we already popped the page, so the current one on the stack is the parent of the page

                    _parentPage = _tree.ModifyPage(_cursor.CurrentPage);

                    _cursor.Update(_cursor.Pages, _parentPage);
                }

                if (_page.IsLeaf)
                {
                    _tree.ClearPagesCache();
                }

                if (_page.IsCompressed)
                {
                    _pageDecompressed = _tree.DecompressPage(_page);
                    _pageDecompressed.Search(_tx, _newKey);
                    if (_pageDecompressed.LastMatch == 0)
                    {
                        // we are going to insert the value in a bit, but it might have
                        // been in the compressed portion and not removed by the calling
                        // code
                        _tree.RemoveLeafNode(_pageDecompressed);
                    }
                    _page = _pageDecompressed;
                }

                using (_pageDecompressed)
                {
                    if (_page.LastSearchPosition >= _page.NumberOfEntries)
                    {
                        var pos = OptimizedOnlyMoveNewValueToTheRightPage(rightPage);
                        RecompressPageIfNeeded(wasModified: false);

                        return pos;
                    }

                    return SplitPageInHalf(rightPage);
                }
            }
        }
Example #3
        private void DecompressedCurrentPage()
        {
            Debug.Assert(_tree.IsLeafCompressionSupported);

            _decompressedPage?.Dispose();

            _currentPage = _decompressedPage = _tree.DecompressPage(_currentPage);
        }
Example #4
        private void DecompressedCurrentPage()
        {
            Debug.Assert(_tree.IsLeafCompressionSupported);

            _decompressedPage?.Dispose();

            _currentPage = _decompressedPage = _tree.DecompressPage(_currentPage, DecompressionUsage.Read, skipCache: false);
        }
Example #5
        private static async Task RenderPageInternalAsync(Tree tree, TreePageSafe page, TextWriter sw, string text, bool open, bool decompress)
        {
            await sw.WriteLineAsync(
                string.Format("<ul><li><input type='checkbox' id='page-{0}' {3} /><label for='page-{0}'>{4}: Page {0:#,#;;0} - {1} - {2:#,#;;0} entries {5}</label><ul>",
                              page.PageNumber, page.IsLeaf ? "Leaf" : "Branch", page.NumberOfEntries, open ? "checked" : "", text,
                              page.IsCompressed ? $"(Compressed ({page.NumberOfCompressedEntries} entries [uncompressed/compressed: {page.UncompressedSize}/{page.CompressedSize}]))" : string.Empty));

            DecompressedLeafPage decompressedPage = null;

            if (page.IsCompressed && decompress)
            {
                decompressedPage = tree.DecompressPage(page.TreePage, DecompressionUsage.Read, skipCache: true);

                page = new TreePageSafe(page.Tree, decompressedPage);
            }

            try
            {
                for (int i = 0; i < page.NumberOfEntries; i++)
                {
                    var nodeHeader = page.GetNode(i);

                    string key = nodeHeader.Key;

                    if (page.IsLeaf)
                    {
                        await sw.WriteAsync(string.Format("<li>{0} {1} - size: {2:#,#}</li>", key, nodeHeader.Flags, nodeHeader.GetDataSize()));
                    }
                    else
                    {
                        var pageNum = nodeHeader.PageNumber;

                        if (i == 0)
                        {
                            key = "[smallest]";
                        }

                        await RenderPageAsync(tree, tree.GetReadOnlyTreePage(pageNum), sw, key, false, decompress);
                    }
                }
            }
            finally
            {
                decompressedPage?.Dispose();
            }

            await sw.WriteLineAsync("</ul></li></ul>");
        }
Example #6
        public DecompressedLeafPage DecompressPage(TreePage p, DecompressionUsage usage = DecompressionUsage.Read, bool skipCache = false)
        {
            var input = new DecompressionInput(p.CompressionHeader, p);

            DecompressedLeafPage decompressedPage;
            DecompressedLeafPage cached = null;

            if (skipCache == false && DecompressionsCache.TryGet(p.PageNumber, usage, out cached))
            {
                decompressedPage = ReuseCachedPage(cached, usage, ref input);

                if (usage == DecompressionUsage.Read)
                {
                    return decompressedPage;
                }
            }
            else
            {
                decompressedPage = DecompressFromBuffer(usage, ref input);
            }

            Debug.Assert(decompressedPage.NumberOfEntries > 0);

            try
            {
                if (p.NumberOfEntries == 0)
                {
                    return decompressedPage;
                }

                HandleUncompressedNodes(decompressedPage, p, usage);

                return decompressedPage;
            }
            finally
            {
                decompressedPage.DebugValidate(this, State.RootPageNumber);

                if (skipCache == false && decompressedPage != cached)
                {
                    DecompressionsCache.Invalidate(p.PageNumber, usage);
                    DecompressionsCache.Add(decompressedPage);
                }
            }
        }
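
DecompressPage combines a read fast path with cache maintenance: a cache hit can be returned to readers as-is, writers must still merge the uncompressed nodes that were appended to the page after it was compressed, and whenever a new decompressed page is built the stale cache entry is replaced. A rough sketch of that pattern, with hypothetical delegates standing in for the Voron calls:

        using System;
        using System.Collections.Generic;

        enum Usage { Read, Write }

        class DecompressionCacheSketch<TKey, TPage> where TPage : class
        {
            private readonly Dictionary<TKey, TPage> _cache = new Dictionary<TKey, TPage>();

            public TPage Get(TKey key, Usage usage, bool skipCache,
                             Func<TPage, TPage> reuseCached,        // grow or reuse a cached page
                             Func<TPage> decompress,                // full decompression from the buffer
                             Action<TPage> applyUncompressedTail)   // merge nodes added after compression
            {
                TPage cached = null;
                TPage result;

                if (skipCache == false && _cache.TryGetValue(key, out cached))
                {
                    result = reuseCached(cached);

                    if (usage == Usage.Read)
                        return result;                              // readers can use the cached copy directly
                }
                else
                {
                    result = decompress();
                }

                applyUncompressedTail(result);

                if (skipCache == false && ReferenceEquals(result, cached) == false)
                    _cache[key] = result;                           // the old entry is stale, replace it

                return result;
            }
        }
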
Example #7
        private ActualKeyScope GetActualKey(TreePage page, int pos, out TreeNodeHeader* node, out Slice key)
        {
            DecompressedLeafPage decompressedLeafPage = null;

            node = page.GetNode(pos);
            var scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);

            while (key.Size == 0)
            {
                Debug.Assert(page.IsBranch);
                page = _tree.GetReadOnlyTreePage(node->PageNumber);
                if (page.IsCompressed == false)
                {
                    node = page.GetNode(0);
                }
                else
                {
                    decompressedLeafPage?.Dispose();
                    decompressedLeafPage = _tree.DecompressPage(page, skipCache: true);

                    if (decompressedLeafPage.NumberOfEntries > 0)
                    {
                        node = decompressedLeafPage.GetNode(0);
                    }
                    else
                    {
                        // we have empty page after decompression (each compressed entry has a corresponding CompressionTombstone)
                        // we can safely use the node key of first tombstone (they have proper order)

                        node = page.GetNode(0);
                    }
                }

                scope.Dispose();
                scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);
            }

            return new ActualKeyScope
            {
                DecompressedLeafPage = decompressedLeafPage,
                ExternalScope = scope
            };
        }
Example #8
        private void HandleTombstone(DecompressedLeafPage decompressedPage, Slice nodeKey, DecompressionUsage usage)
        {
            decompressedPage.Search(_llt, nodeKey);

            if (decompressedPage.LastMatch != 0)
            {
                return;
            }

            var node = decompressedPage.GetNode(decompressedPage.LastSearchPosition);

            if (usage == DecompressionUsage.Write)
            {
                State.NumberOfEntries--;

                if (node->Flags == TreeNodeFlags.PageRef)
                {
                    var overflowPage = GetReadOnlyTreePage(node->PageNumber);
                    FreePage(overflowPage);
                }
            }

            decompressedPage.RemoveNode(decompressedPage.LastSearchPosition);
        }
Example #9
        private void HandleUncompressedNodes(DecompressedLeafPage decompressedPage, TreePage p, DecompressionUsage usage)
        {
            int numberOfEntries = p.NumberOfEntries;

            for (var i = 0; i < numberOfEntries; i++)
            {
                var uncompressedNode = p.GetNode(i);

                Slice nodeKey;
                using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, uncompressedNode, out nodeKey))
                {
                    if (uncompressedNode->Flags == TreeNodeFlags.CompressionTombstone)
                    {
                        HandleTombstone(decompressedPage, nodeKey, usage);
                        continue;
                    }

                    if (decompressedPage.HasSpaceFor(_llt, TreeSizeOf.NodeEntry(uncompressedNode)) == false)
                    {
                        throw new InvalidOperationException("Could not add uncompressed node to decompressed page");
                    }

                    int index;

                    if (decompressedPage.NumberOfEntries > 0)
                    {
                        Slice lastKey;
                        using (decompressedPage.GetNodeKey(_llt, decompressedPage.NumberOfEntries - 1, out lastKey))
                        {
                            // optimization: it's very likely that uncompressed nodes have greater keys than compressed ones
                            // when we insert sequential keys

                            var cmp = SliceComparer.CompareInline(nodeKey, lastKey);

                            if (cmp > 0)
                            {
                                index = decompressedPage.NumberOfEntries;
                            }
                            else
                            {
                                if (cmp == 0)
                                {
                                    // update of the last entry, just decrement NumberOfEntries in the page and
                                    // put it at the last position

                                    index = decompressedPage.NumberOfEntries - 1;
                                    decompressedPage.Lower -= Constants.Tree.NodeOffsetSize;
                                }
                                else
                                {
                                    index = decompressedPage.NodePositionFor(_llt, nodeKey);

                                    if (decompressedPage.LastMatch == 0) // update
                                    {
                                        decompressedPage.RemoveNode(index);

                                        if (usage == DecompressionUsage.Write)
                                        {
                                            State.NumberOfEntries--;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        // all uncompressed nodes were compression tombstones which deleted all entries from the decompressed page
                        index = 0;
                    }

                    switch (uncompressedNode->Flags)
                    {
                    case TreeNodeFlags.PageRef:
                        decompressedPage.AddPageRefNode(index, nodeKey, uncompressedNode->PageNumber);
                        break;

                    case TreeNodeFlags.Data:
                        var pos = decompressedPage.AddDataNode(index, nodeKey, uncompressedNode->DataSize);
                        var nodeValue = TreeNodeHeader.Reader(_llt, uncompressedNode);
                        Memory.Copy(pos, nodeValue.Base, nodeValue.Length);
                        break;

                    case TreeNodeFlags.MultiValuePageRef:
                        throw new NotSupportedException("Multi trees do not support compression");

                    default:
                        throw new NotSupportedException("Invalid node type to copye: " + uncompressedNode->Flags);
                    }
                }
            }
        }
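
The ordering logic above assumes that the uncompressed nodes usually carry keys greater than anything in the compressed part (sequential inserts), so the incoming key is first compared against the current last key, and the NodePositionFor search only runs when that shortcut fails. A simplified sketch of the same fast path, with a plain sorted List<string> standing in for the decompressed page (hypothetical helper, not RavenDB code):

        using System;
        using System.Collections.Generic;

        static class SortedUpsertSketch
        {
            public static void Upsert(List<string> keys, string key)
            {
                if (keys.Count == 0)
                {
                    keys.Add(key);                          // the page was emptied by tombstones
                    return;
                }

                var cmp = string.CompareOrdinal(key, keys[keys.Count - 1]);

                if (cmp > 0)
                {
                    keys.Add(key);                          // sequential insert: append at the end
                    return;
                }

                if (cmp == 0)
                    return;                                 // update of the last entry, nothing to move

                var index = keys.BinarySearch(key, StringComparer.Ordinal);
                if (index >= 0)
                    keys[index] = key;                      // exact match: update in place
                else
                    keys.Insert(~index, key);               // keep the list ordered
            }
        }
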
Example #10
        private byte* SplitPageInHalf(TreePage rightPage)
        {
            bool toRight;

            var currentIndex = _page.LastSearchPosition;
            var splitIndex = _page.NumberOfEntries / 2;

            if (currentIndex <= splitIndex)
            {
                toRight = false;
            }
            else
            {
                toRight = true;

                var leftPageEntryCount = splitIndex;
                var rightPageEntryCount = _page.NumberOfEntries - leftPageEntryCount + 1;

                if (rightPageEntryCount > leftPageEntryCount)
                {
                    splitIndex++;

                    Debug.Assert(splitIndex < _page.NumberOfEntries);
                }
            }

            if (_page.IsLeaf)
            {
                splitIndex = AdjustSplitPosition(currentIndex, splitIndex, ref toRight);
            }

            Slice currentKey;

            using (_page.GetNodeKey(_tx, splitIndex, out currentKey))
            {
                Slice separatorKey;
                if (toRight && splitIndex == currentIndex)
                {
                    separatorKey = SliceComparer.Compare(currentKey, _newKey) < 0 ? currentKey : _newKey;
                }
                else
                {
                    separatorKey = currentKey;
                }

                var addedAsImplicitRef = false;
                var parentOfPage = _cursor.CurrentPage;
                TreePage parentOfRight;

                DecompressedLeafPage rightDecompressed = null;

                if (_pageDecompressed != null)
                {
                    // splitting the decompressed page, let's allocate the page of the same size to ensure enough space
                    rightDecompressed = _tx.Environment.DecompressionBuffers.GetPage(_tx, _pageDecompressed.PageSize, DecompressionUsage.Write, rightPage);
                    rightPage         = rightDecompressed;
                }

                using (rightDecompressed)
                {
                    AddSeparatorToParentPage(rightPage.PageNumber, separatorKey, out parentOfRight);

                    if (_page.IsBranch && toRight && SliceComparer.EqualsInline(separatorKey, _newKey))
                    {
                        // _newKey needs to be inserted as first key (BeforeAllKeys) to the right page, so we need to add it before we move entries from the current page
                        AddNodeToPage(rightPage, 0, Slices.BeforeAllKeys);
                        addedAsImplicitRef = true;
                    }

                    // move the actual entries from page to right page
                    ushort nKeys = _page.NumberOfEntries;
                    for (int i = splitIndex; i < nKeys; i++)
                    {
                        TreeNodeHeader* node = _page.GetNode(i);
                        if (_page.IsBranch && rightPage.NumberOfEntries == 0)
                        {
                            rightPage.CopyNodeDataToEndOfPage(node, Slices.BeforeAllKeys);
                        }
                        else
                        {
                            Slice instance;
                            using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out instance))
                            {
                                rightPage.CopyNodeDataToEndOfPage(node, instance);
                            }
                        }
                    }

                    if (rightDecompressed != null)
                    {
                        rightDecompressed.CopyToOriginal(_tx, defragRequired: false, wasModified: true);
                        rightPage = rightDecompressed.Original;
                    }
                }

                _page.Truncate(_tx, splitIndex);

                RecompressPageIfNeeded(wasModified: true);

                byte* pos;

                if (addedAsImplicitRef == false)
                {
                    try
                    {
                        if (toRight && _cursor.CurrentPage.PageNumber != parentOfRight.PageNumber)
                        {
                            // modify the cursor if we are going to insert to the right page
                            _cursor.Pop();
                            _cursor.Push(parentOfRight);
                        }

                        // actually insert the new key
                        pos = InsertNewKey(toRight ? rightPage : _page);
                    }
                    catch (InvalidOperationException e)
                    {
                        if (e.Message.StartsWith("The page is full and cannot add an entry", StringComparison.Ordinal) == false)
                        {
                            throw;
                        }

                        throw new InvalidOperationException(
                            GatherDetailedDebugInfo(rightPage, currentKey, separatorKey, currentIndex, splitIndex, toRight), e);
                    }
                }
                else
                {
                    pos = null;
                    _cursor.Push(rightPage);
                }

                if (_page.IsBranch)
                {
                    // remove a branch that has only one entry, the page ref needs to be added to the parent of the current page
                    Debug.Assert(_page.NumberOfEntries > 0);
                    Debug.Assert(rightPage.NumberOfEntries > 0);

                    if (_page.NumberOfEntries == 1)
                    {
                        RemoveBranchWithOneEntry(_page, parentOfPage);
                    }

                    if (rightPage.NumberOfEntries == 1)
                    {
                        RemoveBranchWithOneEntry(rightPage, parentOfRight);
                    }
                }

                return pos;
            }
        }
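
The split point above starts at the middle of the page and is then adjusted: the new entry stays on the side it would land on, and when it goes to the right the split index may be nudged so that, once the new entry is counted, the right half does not end up larger than the left one. A condensed sketch of just that decision (hypothetical, no page types):

        static class SplitPointSketch
        {
            public static (int SplitIndex, bool ToRight) Choose(int numberOfEntries, int currentIndex)
            {
                var splitIndex = numberOfEntries / 2;

                if (currentIndex <= splitIndex)
                    return (splitIndex, false);                     // the new entry stays on the left page

                var leftCount = splitIndex;
                var rightCount = numberOfEntries - leftCount + 1;   // +1 accounts for the new entry

                if (rightCount > leftCount)
                    splitIndex++;                                   // keep one more entry on the left page

                return (splitIndex, true);
            }
        }
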
Example #11
        public byte* Execute()
        {
            using (DisableFreeSpaceUsageIfSplittingRootTree())
            {
                TreePage rightPage = _tree.NewPage(_page.TreeFlags, 1);

                if (_cursor.PageCount == 0) // we need to do a root split
                {
                    TreePage newRootPage = _tree.NewPage(TreePageFlags.Branch, 1);
                    _cursor.Push(newRootPage);
                    _tree.State.RootPageNumber = newRootPage.PageNumber;
                    _tree.State.Depth++;

                    // now add implicit left page
                    newRootPage.AddPageRefNode(0, Slices.BeforeAllKeys, _page.PageNumber);
                    _parentPage = newRootPage;
                    _parentPage.LastSearchPosition++;
                }
                else
                {
                    // we already popped the page, so the current one on the stack is the parent of the page

                    _parentPage = _tree.ModifyPage(_cursor.CurrentPage);

                    _cursor.Update(_cursor.Pages.First, _parentPage);
                }

                if (_page.IsLeaf)
                {
                    _tree.ClearPagesCache();
                }

                if (_page.IsCompressed)
                {
                    _pageDecompressed = _tree.DecompressPage(_page);
                    _pageDecompressed.Search(_tx, _newKey);
                    _page = _pageDecompressed;
                }

                using (_pageDecompressed)
                {
                    if (_page.LastSearchPosition >= _page.NumberOfEntries)
                    {
                        // when we get a split at the end of the page, we take that as a hint that the user is doing
                        // sequential inserts, at that point, we are going to keep the current page as is and create a new
                        // page, this will allow us to do minimal amount of work to get the best density

                        TreePage branchOfSeparator;

                        byte* pos;
                        if (_page.IsBranch)
                        {
                            if (_page.NumberOfEntries > 2)
                            {
                                // here we steal the last entry from the current page so we maintain the implicit null left entry

                                TreeNodeHeader* node = _page.GetNode(_page.NumberOfEntries - 1);
                                Debug.Assert(node->Flags == TreeNodeFlags.PageRef);
                                rightPage.AddPageRefNode(0, Slices.BeforeAllKeys, node->PageNumber);
                                pos = AddNodeToPage(rightPage, 1);

                                Slice separatorKey;
                                using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out separatorKey))
                                {
                                    AddSeparatorToParentPage(rightPage.PageNumber, separatorKey, out branchOfSeparator);
                                }

                                _page.RemoveNode(_page.NumberOfEntries - 1);
                            }
                            else
                            {
                                _tree.FreePage(rightPage); // return the unnecessary right page
                                pos = AddSeparatorToParentPage(_pageNumber, _newKey, out branchOfSeparator);

                                if (_cursor.CurrentPage.PageNumber != branchOfSeparator.PageNumber)
                                {
                                    _cursor.Push(branchOfSeparator);
                                }

                                return pos;
                            }
                        }
                        else
                        {
                            AddSeparatorToParentPage(rightPage.PageNumber, _newKey, out branchOfSeparator);
                            pos = AddNodeToPage(rightPage, 0);
                        }
                        _cursor.Push(rightPage);
                        return pos;
                    }

                    return SplitPageInHalf(rightPage);
                }
            }
        }
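
The branch taken when LastSearchPosition falls past the last entry treats the split as a hint of sequential inserts: the current page is left full and only the new key (or, for a branch, its last page reference) moves to the fresh right page, which keeps page density high for ascending keys. A toy illustration of that shortcut, with lists standing in for pages (purely hypothetical):

        using System.Collections.Generic;

        static class SequentialSplitSketch
        {
            public static (List<int> Left, List<int> Right) Split(List<int> page, int newKey, int insertPos)
            {
                if (insertPos >= page.Count)
                {
                    // split at the end: keep the existing page dense, start a fresh right page
                    return (page, new List<int> { newKey });
                }

                // otherwise fall back to a split in half (simplified)
                var splitIndex = page.Count / 2;
                var left = page.GetRange(0, splitIndex);
                var right = page.GetRange(splitIndex, page.Count - splitIndex);

                if (insertPos <= splitIndex)
                    left.Insert(insertPos, newKey);
                else
                    right.Insert(insertPos - splitIndex, newKey);

                return (left, right);
            }
        }
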
Example #12
        private ActualKeyScope GetActualKey(TreePage page, int pos, out TreeNodeHeader* node, out Slice key)
        {
            DecompressedLeafPage decompressedLeafPage = null;

            node = page.GetNode(pos);
            var scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);

            while (key.Size == 0)
            {
                Debug.Assert(page.IsBranch);
                page = _tree.GetReadOnlyTreePage(node->PageNumber);
                if (page.IsCompressed == false)
                {
                    node = page.GetNode(0);
                }
                else
                {
                    decompressedLeafPage?.Dispose();
                    decompressedLeafPage = _tree.DecompressPage(page, DecompressionUsage.Read, skipCache: true);

                    if (decompressedLeafPage.NumberOfEntries > 0)
                    {
                        if (page.NumberOfEntries == 0)
                        {
                            node = decompressedLeafPage.GetNode(0);
                        }
                        else
                        {
                            // we want to find the smallest key in compressed page
                            // it can be inside compressed part or not compressed one
                            // in particular, it can be the key of compression tombstone node that we don't see after decompression
                            // so we need to take first keys from decompressed and compressed page and compare them

                            var decompressedNode = decompressedLeafPage.GetNode(0);
                            var compressedNode = page.GetNode(0);

                            using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, decompressedNode, out var firstDecompressedKey))
                                using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, compressedNode, out var firstCompressedKey))
                                {
                                    node = SliceComparer.CompareInline(firstDecompressedKey, firstCompressedKey) > 0 ? compressedNode : decompressedNode;
                                }
                        }
                    }
                    else
                    {
                        // we have empty page after decompression (each compressed entry has a corresponding CompressionTombstone)
                        // we can safely use the node key of first tombstone (they have proper order)

                        node = page.GetNode(0);
                    }
                }

                scope.Dispose();
                scope = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out key);
            }

            return new ActualKeyScope
            {
                DecompressedLeafPage = decompressedLeafPage,
                ExternalScope = scope
            };
        }
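
When the key found on the way down is implicit (zero size) and the referenced leaf is compressed, the true smallest key may live either among the decompressed entries or among the raw nodes still sitting on the page, such as a compression tombstone, so the first key of each side is compared and the smaller one is used; if decompression yields no entries at all, the first tombstone already carries the right key. A small sketch of that choice, with ordinal strings standing in for Slices (hypothetical helper):

        static class SmallestKeySketch
        {
            public static string SmallestKey(string[] decompressedKeys, string[] rawNodeKeys)
            {
                if (decompressedKeys.Length == 0)
                    return rawNodeKeys[0];                  // only tombstones remain; they keep the original order

                if (rawNodeKeys.Length == 0)
                    return decompressedKeys[0];             // nothing uncompressed on the page

                // the smaller of the two first keys is the actual smallest key of the page
                return string.CompareOrdinal(decompressedKeys[0], rawNodeKeys[0]) > 0
                    ? rawNodeKeys[0]
                    : decompressedKeys[0];
            }
        }
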