private unsafe static void RenderPage(LowLevelTransaction tx, TreePage page, TextWriter sw, string text, bool open)
{
    sw.WriteLine(
        "<ul><li><input type='checkbox' id='page-{0}' {3} /><label for='page-{0}'>{4}: Page {0:#,#;;0} - {1} - {2:#,#;;0} entries</label><ul>",
        page.PageNumber, page.IsLeaf ? "Leaf" : "Branch", page.NumberOfEntries,
        open ? "checked" : "", text);

    for (int i = 0; i < page.NumberOfEntries; i++)
    {
        var nodeHeader = page.GetNode(i);
        var key = TreeNodeHeader.ToSlicePtr(tx.Allocator, nodeHeader).ToString();

        if (page.IsLeaf)
        {
            sw.Write("<li>{0} {1} - size: {2:#,#}</li>", key, nodeHeader->Flags, TreeNodeHeader.GetDataSize(tx, nodeHeader));
        }
        else
        {
            var pageNum = nodeHeader->PageNumber;

            if (i == 0)
                key = "[smallest]";

            RenderPage(tx, tx.GetReadOnlyTreePage(pageNum), sw, key, false);
        }
    }

    sw.WriteLine("</ul></li></ul>");
}
private AggregationResult AggregateBranchPage(TreePage page, Table table, TransactionOperationContext indexContext,
    HashSet<long> remainingBranchesToAggregate, IndexingStatsScope stats, CancellationToken token)
{
    using (_treeReductionStats.BranchAggregation.Start())
    {
        for (int i = 0; i < page.NumberOfEntries; i++)
        {
            var pageNumber = page.GetNode(i)->PageNumber;
            var childPageNumber = Bits.SwapBytes(pageNumber);

            Slice childPageNumberSlice;
            TableValueReader tvr;
            using (Slice.External(indexContext.Allocator, (byte*)&childPageNumber, sizeof(long), out childPageNumberSlice))
            {
                tvr = table.ReadByKey(childPageNumberSlice);
                if (tvr == null)
                {
                    if (remainingBranchesToAggregate.Contains(pageNumber))
                    {
                        // we have a modified branch page but its children were not modified (branch page splitting),
                        // so we haven't aggregated it yet - let's do it now
                        try
                        {
                            page.Base = indexContext.Transaction.InnerTransaction.LowLevelTransaction.GetPage(pageNumber).Pointer;

                            using (var result = AggregateBranchPage(page, table, indexContext, remainingBranchesToAggregate, stats, token))
                            {
                                StoreAggregationResult(page.PageNumber, page.NumberOfEntries, table, result, stats);
                            }
                        }
                        finally
                        {
                            remainingBranchesToAggregate.Remove(pageNumber);
                        }

                        tvr = table.ReadByKey(childPageNumberSlice);
                    }
                    else
                    {
                        throw new InvalidOperationException("Couldn't find pre-computed results for existing page " + pageNumber);
                    }
                }

                int size;
                var numberOfResults = *(int*)tvr.Read(2, out size);

                for (int j = 0; j < numberOfResults; j++)
                {
                    _aggregationBatch.Add(new BlittableJsonReaderObject(tvr.Read(3 + j, out size), size, indexContext));
                }
            }
        }

        return AggregateBatchResults(_aggregationBatch, indexContext, token);
    }
}
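Both branch-aggregation overloads look up the pre-computed results for a child page by its page number after passing it through Bits.SwapBytes and wrapping the result in a Slice. The snippet below is an illustrative sketch (not part of the source) of why the byte swap matters on a little-endian machine: storing the long in big-endian order makes the raw key bytes sort in the same order as the numeric page numbers, which is what a lexicographically compared key space expects. The BitConverter-based SwapBytes here is a hypothetical stand-in for Bits.SwapBytes.

using System;
using System.Linq;

public static class BigEndianKeySketch
{
    // Hypothetical stand-in for Bits.SwapBytes: reverse the byte order of a long
    // so the most significant byte comes first (big-endian).
    public static long SwapBytes(long value)
    {
        var bytes = BitConverter.GetBytes(value);
        Array.Reverse(bytes);
        return BitConverter.ToInt64(bytes, 0);
    }

    public static void Main()
    {
        // On a little-endian machine, page 256 (0x100) is laid out as 00 01 00 ...
        // and page 2 as 02 00 00 ..., so a raw byte-by-byte comparison would put
        // 256 before 2 - the wrong order for a lexicographically sorted key space.
        long smaller = 2, larger = 256;

        byte[] keySmaller = BitConverter.GetBytes(SwapBytes(smaller));
        byte[] keyLarger = BitConverter.GetBytes(SwapBytes(larger));

        // After the swap the keys compare in numeric order again.
        int cmp = keySmaller.Zip(keyLarger, (x, y) => x.CompareTo(y)).FirstOrDefault(c => c != 0);
        Console.WriteLine(cmp < 0); // True: the key for page 2 sorts before the key for page 256
    }
}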
private AggregationResult AggregateLeafPage(TreePage page, LowLevelTransaction lowLevelTransaction, TransactionOperationContext indexContext, CancellationToken token)
{
    using (_treeReductionStats.LeafAggregation.Start())
    {
        for (int i = 0; i < page.NumberOfEntries; i++)
        {
            var valueReader = TreeNodeHeader.Reader(lowLevelTransaction, page.GetNode(i));
            var reduceEntry = new BlittableJsonReaderObject(valueReader.Base, valueReader.Length, indexContext);

            _aggregationBatch.Add(reduceEntry);
        }

        return AggregateBatchResults(_aggregationBatch, indexContext, token);
    }
}
private AggregationResult AggregateBranchPage(TreePage page, Table table, TransactionOperationContext indexContext,
    HashSet<long> remainingBranchesToAggregate, HashSet<long> compressedEmptyLeafs,
    Dictionary<long, Exception> failedAggregatedLeafs, Tree tree, CancellationToken token)
{
    using (_treeReductionStats.BranchAggregation.Start())
    {
        for (int i = 0; i < page.NumberOfEntries; i++)
        {
            var pageNumber = page.GetNode(i)->PageNumber;
            var childPageNumber = Bits.SwapBytes(pageNumber);

            using (Slice.External(indexContext.Allocator, (byte*)&childPageNumber, sizeof(long), out Slice childPageNumberSlice))
            {
                if (table.ReadByKey(childPageNumberSlice, out TableValueReader tvr) == false)
                {
                    if (TryAggregateChildPageOrThrow(pageNumber, table, indexContext, remainingBranchesToAggregate,
                            compressedEmptyLeafs, failedAggregatedLeafs, tree, token))
                    {
                        table.ReadByKey(childPageNumberSlice, out tvr);
                    }
                    else
                    {
                        continue;
                    }
                }

                var numberOfResults = *(int*)tvr.Read(2, out int size);

                for (int j = 0; j < numberOfResults; j++)
                {
                    _aggregationBatch.Items.Add(new BlittableJsonReaderObject(tvr.Read(3 + j, out size), size, indexContext));
                }
            }
        }

        return AggregateBatchResults(_aggregationBatch.Items, indexContext, _treeReductionStats.BranchAggregation, token);
    }
}
private static void GatherBalanceDistribution(Tree tree, TreePage page, Dictionary<int, int> histogram, int depth)
{
    if (page.IsLeaf)
    {
        if (!histogram.TryGetValue(depth, out int value))
            value = 0;

        histogram[depth] = value + 1;
    }
    else
    {
        for (int i = 0; i < page.NumberOfEntries; i++)
        {
            var nodeHeader = page.GetNode(i);
            var pageNum = nodeHeader->PageNumber;

            GatherBalanceDistribution(tree, tree.GetReadOnlyTreePage(pageNum), histogram, depth + 1);
        }
    }
}
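GatherBalanceDistribution builds a histogram of leaf depths, so a balanced tree produces a single bucket and any skew shows up as extra buckets. Below is a minimal, self-contained sketch of the same technique on a toy in-memory tree; the Node type and Gather helper are hypothetical and stand in for Voron's TreePage traversal.

using System;
using System.Collections.Generic;

public sealed class Node
{
    public List<Node> Children { get; } = new List<Node>();
    public bool IsLeaf => Children.Count == 0;
}

public static class BalanceHistogramSketch
{
    // Same idea as GatherBalanceDistribution: recurse into children and count
    // how many leaves end up at each depth.
    public static void Gather(Node node, Dictionary<int, int> histogram, int depth)
    {
        if (node.IsLeaf)
        {
            histogram.TryGetValue(depth, out int value);
            histogram[depth] = value + 1;
            return;
        }

        foreach (var child in node.Children)
            Gather(child, histogram, depth + 1);
    }

    public static void Main()
    {
        var root = new Node();
        root.Children.Add(new Node());
        root.Children.Add(new Node());

        var histogram = new Dictionary<int, int>();
        Gather(root, histogram, depth: 1);

        foreach (var kvp in histogram)
            Console.WriteLine($"depth {kvp.Key}: {kvp.Value} leaves"); // depth 2: 2 leaves
    }
}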
private static unsafe void RenderPage(Tree tree, TreePage page, TextWriter sw, string text, bool open)
{
    sw.WriteLine(
        "<ul><li><input type='checkbox' id='page-{0}' {3} /><label for='page-{0}'>{4}: Page {0:#,#;;0} - {1} - {2:#,#;;0} entries {5}</label><ul>",
        page.PageNumber, page.IsLeaf ? "Leaf" : "Branch", page.NumberOfEntries,
        open ? "checked" : "", text,
        page.IsCompressed
            ? $"(Compressed ({page.CompressionHeader->NumberOfCompressedEntries} entries [uncompressed/compressed: {page.CompressionHeader->UncompressedSize}/{page.CompressionHeader->CompressedSize}]))"
            : string.Empty);

    for (int i = 0; i < page.NumberOfEntries; i++)
    {
        var nodeHeader = page.GetNode(i);

        string key;
        Slice keySlice;
        using (TreeNodeHeader.ToSlicePtr(tree.Llt.Allocator, nodeHeader, out keySlice))
        {
            key = keySlice.ToString();
        }

        if (page.IsLeaf)
        {
            sw.Write("<li>{0} {1} - size: {2:#,#}</li>", key, nodeHeader->Flags, tree.GetDataSize(nodeHeader));
        }
        else
        {
            var pageNum = nodeHeader->PageNumber;

            if (i == 0)
                key = "[smallest]";

            RenderPage(tree, tree.GetReadOnlyTreePage(pageNum), sw, key, false);
        }
    }

    sw.WriteLine("</ul></li></ul>");
}
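Both RenderPage variants emit the classic checkbox/label pattern for a collapsible tree: each page becomes a list item whose checkbox controls visibility of the nested <ul> holding its children, and the recursion closes each item only after its subtree has been written. The sketch below (not part of the source; page numbers, keys, and sizes are made up) just shows the rough shape of the markup for a branch page with a single leaf child.

using System;

public static class RenderPageOutputSketch
{
    // Rough shape of the markup RenderPage produces: a checkbox/label pair per page
    // ("checked" opens the root by default), leaf entries as <li> items, and a
    // closing </ul></li></ul> once the page's subtree has been rendered.
    public static void Main()
    {
        Console.WriteLine(
@"<ul><li><input type='checkbox' id='page-3' checked /><label for='page-3'>Root: Page 3 - Branch - 1 entries </label><ul>
<ul><li><input type='checkbox' id='page-5'  /><label for='page-5'>[smallest]: Page 5 - Leaf - 2 entries </label><ul>
<li>users/1 Data - size: 128</li><li>users/2 Data - size: 256</li>
</ul></li></ul>
</ul></li></ul>");
    }
}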
public TreeNodeHeader* Original_WithPrefetch_Search(TreePage page, ByteStringContext allocator, Slice key)
{
    int numberOfEntries = page.NumberOfEntries;
    if (numberOfEntries == 0)
        goto NoEntries;

    int lastMatch = -1;
    int lastSearchPosition = 0;

    SliceOptions options = key.Options;
    if (options == SliceOptions.Key)
    {
        if (numberOfEntries == 1)
            goto SingleEntryKey;

        int low = page.IsLeaf ? 0 : 1;
        int high = numberOfEntries - 1;
        int position = 0;

        ushort* offsets = page.KeysOffsets;
        byte* @base = page.Base;
        while (low <= high)
        {
            position = (low + high) >> 1;

            var node = (TreeNodeHeader*)(@base + offsets[position]);

            Slice pageKey;
            using (TreeNodeHeader.ToSlicePtr(allocator, node, out pageKey))
            {
                Sse.Prefetch0(pageKey.Content.Ptr);
                lastMatch = SliceComparer.CompareInline(key, pageKey);
            }

            if (lastMatch == 0)
                break;

            if (lastMatch > 0)
                low = position + 1;
            else
                high = position - 1;
        }

        if (lastMatch > 0) // found entry less than key
            position++;    // move to the smallest entry larger than the key

        lastSearchPosition = position;
        goto MultipleEntryKey;
    }

    if (options == SliceOptions.BeforeAllKeys)
    {
        lastMatch = 1;
        goto MultipleEntryKey;
    }

    if (options == SliceOptions.AfterAllKeys)
    {
        lastSearchPosition = numberOfEntries - 1;
        goto MultipleEntryKey;
    }

    return null;

NoEntries:
    {
        page.LastSearchPosition = 0;
        page.LastMatch = 1;
        return null;
    }

SingleEntryKey:
    {
        var node = page.GetNode(0);

        Slice pageKey;
        using (TreeNodeHeader.ToSlicePtr(allocator, node, out pageKey))
        {
            page.LastMatch = SliceComparer.CompareInline(key, pageKey);
        }

        page.LastSearchPosition = page.LastMatch > 0 ? 1 : 0;
        return page.LastSearchPosition == 0 ? node : null;
    }

MultipleEntryKey:
    {
        page.LastMatch = lastMatch;
        page.LastSearchPosition = lastSearchPosition;

        if (lastSearchPosition >= numberOfEntries)
            return null;

        return page.GetNode(lastSearchPosition);
    }
}
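The method above is a standard binary search over the page's sorted key offsets with two twists: branch pages start at slot 1 (slot 0 holds the implicit smallest key), and when the key is not found the final position is advanced past the last entry that compared smaller, so it points at the smallest entry larger than the key. A minimal sketch of that lower-bound behaviour on a plain sorted array follows; the names here are illustrative, not Voron's.

using System;

public static class LowerBoundSearchSketch
{
    // Returns the index of the first element >= key, and reports through lastMatch
    // the sign of the final comparison (0 = exact hit, >0 = key was greater than
    // the last probed entry, <0 = key was smaller), mirroring how page.LastMatch
    // and page.LastSearchPosition are set in the method above.
    public static int Search(int[] sortedKeys, int key, out int lastMatch)
    {
        int low = 0, high = sortedKeys.Length - 1, position = 0;
        lastMatch = -1;

        while (low <= high)
        {
            position = (low + high) >> 1;
            lastMatch = key.CompareTo(sortedKeys[position]);

            if (lastMatch == 0)
                break;

            if (lastMatch > 0)
                low = position + 1;
            else
                high = position - 1;
        }

        if (lastMatch > 0)   // stopped on an entry smaller than the key
            position++;      // move to the smallest entry larger than the key

        return position;
    }

    public static void Main()
    {
        var keys = new[] { 10, 20, 30, 40 };

        Console.WriteLine(Search(keys, 20, out var m1) + " " + m1); // 1 0   (exact hit)
        Console.WriteLine(Search(keys, 25, out var m2) + " " + m2); // 2 -1  (last probe was 30; position is the lower bound)
        Console.WriteLine(Search(keys, 50, out var m3) + " " + m3); // 4 1   (past the end, like lastSearchPosition >= numberOfEntries)
    }
}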