/// <summary>
/// Looks up a cached decompressed page by page number and usage.
/// Returns true and the cached page on a hit; false (and null) otherwise.
/// </summary>
public bool TryGet(long pageNumber, DecompressionUsage usage, out DecompressedLeafPage decompressed)
{
    // Probe the circular cache starting from the current slot, visiting
    // each of the Size entries exactly once (null slots are skipped).
    var slot = _current;
    for (var remaining = Size; remaining > 0; remaining--, slot++)
    {
        var candidate = _cache[slot % Size];
        if (candidate == null)
            continue;

        if (candidate.PageNumber == pageNumber && candidate.Usage == usage)
        {
            Debug.Assert(candidate.Cached);
            decompressed = candidate;
            return true;
        }
    }

    decompressed = null;
    return false;
}
/// <summary>
/// Reuses a cached decompressed page for a new decompression request.
/// If the required decompressed size now exceeds the cached page's size, the
/// cached content is migrated into a freshly allocated, larger page; otherwise
/// the cached page is returned as-is.
/// </summary>
/// <param name="cached">The previously cached decompressed page.</param>
/// <param name="usage">Read/Write usage the page is being requested for.</param>
/// <param name="input">Decompression parameters, including the required decompressed page size.</param>
/// <returns>Either <paramref name="cached"/> itself or a larger replacement page holding the same entries.</returns>
private DecompressedLeafPage ReuseCachedPage(DecompressedLeafPage cached, DecompressionUsage usage, ref DecompressionInput input)
{
    DecompressedLeafPage result;

    var sizeDiff = input.DecompressedPageSize - cached.PageSize;
    if (sizeDiff > 0)
    {
        // Cached page is too small for the requested size — allocate a bigger one.
        result = _llt.Environment.DecompressionBuffers.GetPage(_llt, input.DecompressedPageSize, usage, input.Page);

        // Copy the page header plus the key-offsets array (everything below Lower).
        Memory.Copy(result.Base, cached.Base, cached.Lower);
        // Copy the node data region (Upper..PageSize), shifted toward the end of
        // the larger page by sizeDiff so the free space in the middle grows.
        Memory.Copy(result.Base + cached.Upper + sizeDiff,
            cached.Base + cached.Upper,
            cached.PageSize - cached.Upper);

        // Node data moved by sizeDiff, so Upper and every key offset must move too.
        result.Upper += (ushort)sizeDiff;

        for (var i = 0; i < result.NumberOfEntries; i++)
        {
            result.KeysOffsets[i] += (ushort)sizeDiff;
        }
    }
    else
    {
        // Cached page is already large enough — reuse it directly.
        result = cached;
    }
    return (result);
}
/// <summary>
/// Rents a temporary page buffer of the requested size and wraps it in a
/// <see cref="DecompressedLeafPage"/> view over the given compressed page.
/// </summary>
public DecompressedLeafPage GetPage(LowLevelTransaction tx, int pageSize, DecompressionUsage usage, TreePage original)
{
    TemporaryPage tempPage;
    GetTemporaryPage(tx, pageSize, out tempPage);

    var scratch = tempPage.GetTempPage();

    return new DecompressedLeafPage(scratch.Base, scratch.PageSize, usage, original, tempPage);
}
/// <summary>
/// Wraps a raw buffer as the decompressed view of a compressed tree page.
/// </summary>
/// <param name="basePtr">Start of the backing buffer.</param>
/// <param name="pageSize">Size of the backing buffer in bytes.</param>
/// <param name="usage">Whether the page was decompressed for reading or writing.</param>
/// <param name="original">The compressed source page this instance expands.</param>
/// <param name="tempPage">The temporary buffer owning <paramref name="basePtr"/>.</param>
public DecompressedLeafPage(byte* basePtr, int pageSize, DecompressionUsage usage, TreePage original, TemporaryPage tempPage)
    : base(basePtr, pageSize)
{
    Usage = usage;
    Original = original;
    _tempPage = tempPage;

    // Mirror the identity of the source page, but clear the Compressed flag:
    // this instance holds the expanded representation.
    PageNumber = original.PageNumber;
    TreeFlags = original.TreeFlags;
    Flags = original.Flags & ~PageFlags.Compressed;
}
/// <summary>
/// Evicts the cached decompressed page matching the given page number and
/// usage, if present: clears its Cached flag, disposes it, and frees the slot.
/// </summary>
public void Invalidate(long pageNumber, DecompressionUsage usage)
{
    for (var slot = 0; slot < _cache.Length; slot++)
    {
        var entry = _cache[slot];
        if (entry == null || entry.PageNumber != pageNumber || entry.Usage != usage)
            continue;

        // Drop the cached mark before disposing, then release the slot.
        entry.Cached = false;
        entry.Dispose();
        _cache[slot] = null;
        return; // at most one matching entry is evicted
    }
}
/// <summary>
/// Returns a decompressed view of the compressed tree page <paramref name="p"/>,
/// consulting the decompression cache unless <paramref name="skipCache"/> is set.
/// Uncompressed nodes stored alongside the compressed blob are merged into the
/// result (except on a cache hit used for reading, which returns immediately —
/// presumably the cached copy already reflects them; confirm against the cache's
/// add/invalidate protocol).
/// </summary>
/// <param name="p">The compressed page to decompress.</param>
/// <param name="usage">Read or Write intent for the decompressed page.</param>
/// <param name="skipCache">When true, bypasses both cache lookup and cache insertion.</param>
public DecompressedLeafPage DecompressPage(TreePage p, DecompressionUsage usage = DecompressionUsage.Read, bool skipCache = false)
{
    var input = new DecompressionInput(p.CompressionHeader, p);

    DecompressedLeafPage decompressedPage;
    DecompressedLeafPage cached = null;

    if (skipCache == false && DecompressionsCache.TryGet(p.PageNumber, usage, out cached))
    {
        // Cache hit: reuse (or grow) the cached page. For reads we can return
        // it right away without re-applying the uncompressed nodes below.
        decompressedPage = ReuseCachedPage(cached, usage, ref input);

        if (usage == DecompressionUsage.Read)
        {
            return (decompressedPage);
        }
    }
    else
    {
        decompressedPage = DecompressFromBuffer(usage, ref input);
    }

    Debug.Assert(decompressedPage.NumberOfEntries > 0);

    try
    {
        if (p.NumberOfEntries == 0)
        {
            // No uncompressed nodes to merge in.
            return (decompressedPage);
        }

        HandleUncompressedNodes(decompressedPage, p, usage);

        return (decompressedPage);
    }
    finally
    {
        decompressedPage.DebugValidate(this, State.RootPageNumber);

        // If we produced a page other than the one already cached (fresh
        // decompression, or the cached page was replaced by a larger copy),
        // evict the stale entry and cache the new one.
        if (skipCache == false && decompressedPage != cached)
        {
            DecompressionsCache.Invalidate(p.PageNumber, usage);
            DecompressionsCache.Add(decompressedPage);
        }
    }
}
/// <summary>
/// Decompresses the node data described by <paramref name="input"/> into a
/// freshly rented page and rebuilds the page's key-offsets array so the entries
/// are addressable at their new positions.
/// </summary>
/// <param name="usage">Read or Write intent for the resulting page.</param>
/// <param name="input">Compressed payload, sizes, entry count, and original key offsets.</param>
private DecompressedLeafPage DecompressFromBuffer(DecompressionUsage usage, ref DecompressionInput input)
{
    var result = _llt.Environment.DecompressionBuffers.GetPage(_llt, input.DecompressedPageSize, usage, input.Page);

    // Node data is placed at the very end of the page, so all free space
    // sits between the offsets array (Lower) and the data region (Upper).
    var decompressedNodesOffset = (ushort)(result.PageSize - input.DecompressedSize);

    LZ4.Decode64LongBuffers(
        input.Data,
        input.CompressedSize,
        result.Base + decompressedNodesOffset,
        input.DecompressedSize,
        true);

    result.Lower += input.KeysOffsetsSize;
    result.Upper = decompressedNodesOffset;

    // The stored key offsets are relative to the start of the node data;
    // rebase them onto this page's Upper boundary.
    for (var i = 0; i < input.NumberOfEntries; i++)
    {
        result.KeysOffsets[i] = (ushort)(input.KeysOffsets[i] + result.Upper);
    }
    return (result);
}
/// <summary>
/// Applies a compression tombstone to the decompressed page: if an entry with
/// <paramref name="nodeKey"/> exists it is removed, and under Write usage the
/// tree's entry count is decremented and any referenced external page is freed.
/// </summary>
private void HandleTombstone(DecompressedLeafPage decompressedPage, Slice nodeKey, DecompressionUsage usage)
{
    // Locate the entry the tombstone deletes; nothing to do if the key is absent.
    decompressedPage.Search(_llt, nodeKey);
    if (decompressedPage.LastMatch != 0)
        return;

    var position = decompressedPage.LastSearchPosition;
    var deleted = decompressedPage.GetNode(position);

    if (usage == DecompressionUsage.Write)
    {
        State.NumberOfEntries--;

        // A page-ref node points at a separate page; release it along with
        // the node that referenced it.
        if (deleted->Flags == TreeNodeFlags.PageRef)
            FreePage(GetReadOnlyTreePage(deleted->PageNumber));
    }

    decompressedPage.RemoveNode(position);
}
/// <summary>
/// Merges the uncompressed nodes stored on the compressed page <paramref name="p"/>
/// into the decompressed page: tombstones delete matching entries, data and
/// page-ref nodes are inserted (or replace an existing entry with the same key).
/// </summary>
/// <param name="decompressedPage">The target page produced by decompression.</param>
/// <param name="p">The compressed source page whose raw nodes are applied.</param>
/// <param name="usage">Under Write usage, replaced entries also decrement the tree's entry count.</param>
/// <exception cref="InvalidOperationException">The decompressed page has no room for a node.</exception>
/// <exception cref="NotSupportedException">A multi-value page-ref or unknown node type is encountered.</exception>
private void HandleUncompressedNodes(DecompressedLeafPage decompressedPage, TreePage p, DecompressionUsage usage)
{
    int numberOfEntries = p.NumberOfEntries;
    for (var i = 0; i < numberOfEntries; i++)
    {
        var uncompressedNode = p.GetNode(i);

        Slice nodeKey;
        using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, uncompressedNode, out nodeKey))
        {
            if (uncompressedNode->Flags == TreeNodeFlags.CompressionTombstone)
            {
                HandleTombstone(decompressedPage, nodeKey, usage);
                continue;
            }

            if (decompressedPage.HasSpaceFor(_llt, TreeSizeOf.NodeEntry(uncompressedNode)) == false)
                throw new InvalidOperationException("Could not add uncompressed node to decompressed page");

            int index;

            if (decompressedPage.NumberOfEntries > 0)
            {
                Slice lastKey;
                using (decompressedPage.GetNodeKey(_llt, decompressedPage.NumberOfEntries - 1, out lastKey))
                {
                    // optimization: it's very likely that uncompressed nodes have greater keys than compressed ones
                    // when we insert sequential keys
                    var cmp = SliceComparer.CompareInline(nodeKey, lastKey);

                    if (cmp > 0)
                    {
                        // Strictly greater than the last key — append at the end.
                        index = decompressedPage.NumberOfEntries;
                    }
                    else
                    {
                        if (cmp == 0)
                        {
                            // update of the last entry, just decrement NumberOfEntries in the page and
                            // put it at the last position
                            index = decompressedPage.NumberOfEntries - 1;
                            decompressedPage.Lower -= Constants.Tree.NodeOffsetSize;
                        }
                        else
                        {
                            index = decompressedPage.NodePositionFor(_llt, nodeKey);

                            if (decompressedPage.LastMatch == 0) // update
                            {
                                decompressedPage.RemoveNode(index);

                                if (usage == DecompressionUsage.Write)
                                    State.NumberOfEntries--;
                            }
                        }
                    }
                }
            }
            else
            {
                // all uncompressed nodes were compression tombstones which deleted all entries from the decompressed page
                index = 0;
            }

            switch (uncompressedNode->Flags)
            {
                case TreeNodeFlags.PageRef:
                    decompressedPage.AddPageRefNode(index, nodeKey, uncompressedNode->PageNumber);
                    break;
                case TreeNodeFlags.Data:
                    var pos = decompressedPage.AddDataNode(index, nodeKey, uncompressedNode->DataSize);
                    var nodeValue = TreeNodeHeader.Reader(_llt, uncompressedNode);
                    Memory.Copy(pos, nodeValue.Base, nodeValue.Length);
                    break;
                case TreeNodeFlags.MultiValuePageRef:
                    throw new NotSupportedException("Multi trees do not support compression");
                default:
                    // Fixed typo in the original message ("copye" -> "copy").
                    throw new NotSupportedException("Invalid node type to copy: " + uncompressedNode->Flags);
            }
        }
    }
}