/// <summary>
/// Builds a <see cref="TreeReport"/> snapshot for a fixed size tree.
/// When <paramref name="includeDetails"/> is set, page densities are sampled so the
/// used-space figure can be estimated; otherwise density and used space report -1.
/// </summary>
public static TreeReport GetReport(FixedSizeTree fst, bool includeDetails)
{
    List<double> pageDensities = null;
    if (includeDetails)
        pageDensities = GetPageDensities(fst);

    // -1 means "density was not computed"
    var density = pageDensities?.Average() ?? -1;

    var allocatedSpaceInBytes = fst.PageCount * Constants.Storage.PageSize;

    return new TreeReport
    {
        Type = fst.Type ?? RootObjectType.FixedSizeTree,
        Name = fst.Name.ToString(),
        BranchPages = -1, // branch/leaf breakdown is not tracked for fixed size trees
        Depth = fst.Depth,
        NumberOfEntries = fst.NumberOfEntries,
        LeafPages = -1,
        OverflowPages = 0,
        PageCount = fst.PageCount,
        Density = density,
        AllocatedSpaceInBytes = allocatedSpaceInBytes,
        UsedSpaceInBytes = includeDetails ? (long)(allocatedSpaceInBytes * density) : -1,
        MultiValues = null,
    };
}
// Scans free-space sections starting at the iterator's current position, looking for
// `num` contiguous free pages. A section whose total free count is below `num` is only
// considered for merging with an adjacent section. Returns the first page number of the
// found range, or null when no section (alone or merged) can satisfy the request.
private long? TryFindSmallValue(LowLevelTransaction tx, FixedSizeTree freeSpaceTree, FixedSizeTree.IFixedSizeIterator it, int num)
{
    do
    {
        var current = new StreamBitArray(it.CreateReaderForCurrent());
        long? page;
        if (current.SetCount < num)
        {
            // Not enough free pages in this section alone - try to satisfy the
            // request by merging it with another section.
            if (TryFindSmallValueMergingTwoSections(tx, freeSpaceTree, it.CurrentKey, num, current, out page))
            {
                return (page);
            }
            continue;
        }
        if (TryFindContinuousRange(tx, freeSpaceTree, it, num, current, it.CurrentKey, out page))
        {
            return (page);
        }
        //could not find a continuous so trying to merge
        if (TryFindSmallValueMergingTwoSections(tx, freeSpaceTree, it.CurrentKey, num, current, out page))
        {
            return (page);
        }
    } while (it.MoveNext());
    return (null);
}
/// <summary>
/// Reads every entry of the document's map-entries tree into a queue, preserving
/// iteration order. Each value is an 8-byte reduce key hash; the fixed-size-tree key
/// becomes the map entry id.
/// </summary>
internal static unsafe Queue<MapEntry> GetMapEntries(FixedSizeTree documentMapEntries)
{
    var result = new Queue<MapEntry>((int)documentMapEntries.NumberOfEntries);
    if (documentMapEntries.NumberOfEntries == 0)
        return result;

    using (var iterator = documentMapEntries.Iterate())
    {
        if (iterator.Seek(long.MinValue) == false)
            ThrowCouldNotSeekToFirstElement(documentMapEntries.Name);

        do
        {
            // The stored value is exactly one ulong: the reduce key hash.
            ulong hash;
            iterator.CreateReaderForCurrent().Read((byte*)&hash, sizeof(ulong));

            result.Enqueue(new MapEntry
            {
                Id = iterator.CurrentKey,
                ReduceKeyHash = hash
            });
        } while (iterator.MoveNext());
    }

    return result;
}
// Renders a fixed size tree as HTML into `writer`. The tree's root object is read
// from the parent tree to pick the layout: an embedded fst is dumped inline as a flat
// list of keys, while a large fst is rendered recursively starting from its root page.
private static unsafe void DumpFixedSizeTreeToStream(LowLevelTransaction tx, FixedSizeTree fst, TextWriter writer, Slice name, Tree tree)
{
    var ptr = tree.DirectRead(name);
    if (ptr == null)
    {
        writer.WriteLine("<p>empty fixed size tree</p>");
    }
    else if (((FixedSizeTreeHeader.Embedded*)ptr)->RootObjectType == RootObjectType.EmbeddedFixedSizeTree)
    {
        var header = ((FixedSizeTreeHeader.Embedded*)ptr);
        writer.WriteLine("<p>Number of entries: {0:#,#;;0}, val size: {1:#,#;;0}.</p>", header->NumberOfEntries, header->ValueSize);
        writer.WriteLine("<ul>");
        // Embedded entries are laid out right after the header as
        // (long key + ValueSize bytes) records; only the keys are printed.
        var dataStart = ptr + sizeof(FixedSizeTreeHeader.Embedded);
        for (int i = 0; i < header->NumberOfEntries; i++)
        {
            var key = *(long*)(dataStart + ((sizeof(long) + header->ValueSize) * i));
            writer.WriteLine("<li>{0:#,#;;0}</li>", key);
        }
        writer.WriteLine("</ul>");
    }
    else
    {
        var header = (FixedSizeTreeHeader.Large*)ptr;
        writer.WriteLine("<p>Number of entries: {0:#,#;;0}, val size: {1:#,#;;0}.</p>", header->NumberOfEntries, header->ValueSize);
        writer.WriteLine("<div class='css-treeview'><ul>");
        // Recursive rendering starts at the root page of the large tree.
        var page = fst.GetReadOnlyPage(header->RootPageNumber);
        RenderFixedSizeTreePage(tx, page, writer, header, "Root", true);
        writer.WriteLine("</ul></div>");
    }
}
/// <summary>
/// Materializes all chunk descriptors of the stream stored under <paramref name="key"/>.
/// Also hands back the underlying fixed size tree via <paramref name="tree"/>.
/// Returns null when the stream has no chunks.
/// </summary>
public ChunkDetails[] ReadTreeChunks(Slice key, out FixedSizeTree tree)
{
    tree = FixedTreeFor(key, ChunkDetails.SizeOf);

    var chunkCount = tree.NumberOfEntries;
    if (chunkCount <= 0)
        return null;

    var details = new ChunkDetails[chunkCount];
    var index = 0;

    using (var it = tree.Iterate())
    {
        if (it.Seek(0) == false)
        {
            Debug.Assert(false, "ReadTreeChunks failed to find any chunks, but we checked that the fst is not empty");
            return null; // can never happen
        }

        do
        {
            // Each value is a raw ChunkDetails struct; copy it out of the slice.
            using (it.Value(out Slice slice))
                details[index++] = *(ChunkDetails*)slice.Content.Ptr;
        } while (it.MoveNext());
    }

    return details;
}
// Allocates a new batch of pages for the allocator, registers the batch's bitmap in
// the fixed size tree under the first page number, and marks the batch as free while
// keeping the (not yet allocated) remainder of the bitmap flagged as busy.
// Throws via ThrowInvalidExistingBuffer when a bitmap for that page already exists.
private unsafe Page AllocateMoreSpace(FixedSizeTree fst)
{
    int numberOfPagesToAllocate = GetNumberOfPagesToAllocate();
    var allocatePage = _llt.AllocatePage(numberOfPagesToAllocate);
    _llt.BreakLargeAllocationToSeparatePages(allocatePage.PageNumber);

    var initialPageNumber = allocatePage.PageNumber;

    bool isNew;
    byte* ptr;
    using (fst.DirectAdd(initialPageNumber, out isNew, out ptr))
    {
        if (isNew == false)
            ThrowInvalidExistingBuffer();

        // in 32 bits, we pre-allocate just 256 KB, not 2MB
        Debug.Assert(numberOfPagesToAllocate % 8 == 0);
        // Fixed: the original asserted `numberOfPagesToAllocate % 8 <= BitmapSize`,
        // which is vacuously true (the assert above guarantees the remainder is 0).
        // The intended guard protects the Memory.Set below, which writes
        // numberOfPagesToAllocate / 8 bytes into a BitmapSize-byte buffer.
        Debug.Assert(numberOfPagesToAllocate / 8 <= BitmapSize);

        Memory.Set(ptr, 0xFF, BitmapSize); // mark the pages that we haven't allocated as busy
        Memory.Set(ptr, 0, numberOfPagesToAllocate / 8); // mark just the first part as free
    }
    return allocatePage;
}
/// <summary>
/// Counts the entries whose key differs from <paramref name="afterValue"/>, starting
/// from the first entry at-or-after it. <paramref name="totalCount"/> receives the
/// total number of entries in the tree. Returns 0 for an empty tree or when the seek
/// finds nothing.
/// </summary>
private static long Calculate(FixedSizeTree fst, long afterValue, out long totalCount)
{
    totalCount = fst.NumberOfEntries;
    if (totalCount == 0)
        return 0;

    long matched = 0;
    using (var iterator = fst.Iterate())
    {
        if (iterator.Seek(afterValue) == false)
            return 0;

        do
        {
            // The entry equal to afterValue itself is excluded from the count.
            if (iterator.CurrentKey != afterValue)
                matched++;
        } while (iterator.MoveNext());
    }

    return matched;
}
// Collects every page number owned by this tree: branch and leaf pages, overflow
// pages referenced by PageRef nodes, and - recursively - the pages of nested
// multi-value trees and fixed size trees stored in leaf values.
public List<long> AllPages()
{
    var results = new List<long>();
    var stack = new Stack<Page>();
    var root = _tx.GetReadOnlyPage(State.RootPageNumber);
    stack.Push(root);
    while (stack.Count > 0)
    {
        var p = stack.Pop();
        results.Add(p.PageNumber);
        var key = p.CreateNewEmptyKey();
        for (int i = 0; i < p.NumberOfEntries; i++)
        {
            var node = p.GetNode(i);
            var pageNumber = node->PageNumber;
            if (p.IsBranch)
            {
                // Branch node: descend into the child page.
                stack.Push(_tx.GetReadOnlyPage(pageNumber));
            }
            else if (node->Flags == NodeFlags.PageRef)
            {
                // This is an overflow page
                var overflowPage = _tx.GetReadOnlyPage(pageNumber);
                var numberOfPages = _tx.DataPager.GetNumberOfOverflowPages(overflowPage.OverflowSize);
                for (long j = 0; j < numberOfPages; ++j)
                {
                    results.Add(overflowPage.PageNumber + j);
                }
            }
            else if (node->Flags == NodeFlags.MultiValuePageRef)
            {
                // this is a multi value
                p.SetNodeKey(node, ref key);
                var tree = OpenMultiValueTree(_tx, (Slice)key, node);
                results.AddRange(tree.AllPages());
            }
            else
            {
                if (State.Flags.HasFlag(TreeFlags.FixedSizeTrees))
                {
                    // The first byte of the leaf value is read as the nested
                    // fixed size tree's value size (cf. the Embedded header layout).
                    var valueReader = NodeHeader.Reader(_tx, node);
                    byte valueSize = *valueReader.Base;
                    var fixedSizeTreeName = p.GetNodeKey(i);
                    var fixedSizeTree = new FixedSizeTree(_tx, this, (Slice)fixedSizeTreeName, valueSize);
                    var pages = fixedSizeTree.AllPages();
                    results.AddRange(pages);
                }
            }
        }
    }
    return (results);
}
/// <summary>
/// Creates an iterator over an embedded fixed size tree. The embedded header is read
/// directly from the parent tree's value; entry data begins immediately after it.
/// </summary>
public EmbeddedIterator(FixedSizeTree fst)
{
    _fst = fst;
    byte* headerPtr = fst._parent.DirectRead(fst._treeName);
    _header = (FixedSizeTreeHeader.Embedded*)headerPtr;
    // Entries are laid out right past the embedded header.
    _dataStart = headerPtr + sizeof(FixedSizeTreeHeader.Embedded);
}
// Collects every page number owned by this tree, including overflow pages referenced
// by PageRef nodes and - recursively - the pages of nested multi-value trees and
// fixed size trees stored in leaf values.
public List<long> AllPages()
{
    var results = new List<long>();
    var stack = new Stack<TreePage>();
    var root = _llt.GetReadOnlyTreePage(State.RootPageNumber);
    stack.Push(root);
    Slice key = default(Slice);
    while (stack.Count > 0)
    {
        var p = stack.Pop();
        results.Add(p.PageNumber);
        for (int i = 0; i < p.NumberOfEntries; i++)
        {
            var node = p.GetNode(i);
            var pageNumber = node->PageNumber;
            if (p.IsBranch)
            {
                // Branch node: descend into the child page.
                stack.Push(_llt.GetReadOnlyTreePage(pageNumber));
            }
            else if (node->Flags == TreeNodeFlags.PageRef)
            {
                // This is an overflow page
                var overflowPage = _llt.GetReadOnlyTreePage(pageNumber);
                var numberOfPages = _llt.DataPager.GetNumberOfOverflowPages(overflowPage.OverflowSize);
                for (long j = 0; j < numberOfPages; ++j)
                {
                    results.Add(overflowPage.PageNumber + j);
                }
            }
            else if (node->Flags == TreeNodeFlags.MultiValuePageRef)
            {
                // Nested multi-value tree: recurse into its pages as well.
                key = TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, ByteStringType.Mutable);
                var tree = OpenMultiValueTree(key, node);
                results.AddRange(tree.AllPages());
            }
            else
            {
                if ((State.Flags & TreeFlags.FixedSizeTrees) == TreeFlags.FixedSizeTrees)
                {
                    // The leaf value starts with an embedded fst header; read its
                    // ValueSize to open the nested fixed size tree.
                    var valueReader = TreeNodeHeader.Reader(_llt, node);
                    var valueSize = ((FixedSizeTreeHeader.Embedded*)valueReader.Base)->ValueSize;
                    var fixedSizeTreeName = p.GetNodeKey(_llt, i);
                    var fixedSizeTree = new FixedSizeTree(_llt, this, fixedSizeTreeName, valueSize);
                    var pages = fixedSizeTree.AllPages();
                    results.AddRange(pages);
                }
            }
        }
    }
    return (results);
}
// Initializes the stream writer state: opens (or creates) the chunk-details tree
// under `key`, deletes any previous stream stored there (capturing its version),
// and records the chunk size and optional tag for subsequent writes.
public void Init(Tree parent, Slice key, Slice? tag, int? initialNumberOfPagesPerChunk)
{
    _parent = parent;
    _tree = _parent.FixedTreeFor(key, ChunkDetails.SizeOf);
    // Deleting the old stream returns its version so the rewrite can bump it.
    _version = _parent.DeleteStream(key);
    // Removed a redundant `_numberOfPagesPerChunk = 1;` that was unconditionally
    // overwritten here; nothing between the two assignments read the field
    // (the intervening calls operate on _parent, not on this object).
    _numberOfPagesPerChunk = initialNumberOfPagesPerChunk ?? 1;
    _tag = tag;
}
// Debug helper: renders a fixed size tree as an HTML tree view and shows it.
// The async dump is executed synchronously because RenderHtmlTreeView takes a
// synchronous writer callback.
public static void RenderAndShow_FixedSizeTree(LowLevelTransaction tx, FixedSizeTree fst)
{
    var name = fst.Name;
    var tree = fst.Parent;
    RenderHtmlTreeView(writer =>
    {
        // GetAwaiter().GetResult() instead of Wait(): on failure it rethrows the
        // original exception instead of wrapping it in an AggregateException.
        DumpFixedSizeTreeToStreamAsync(tx, fst, writer, name, tree).GetAwaiter().GetResult();
    });
}
// Debug visualization: groups all map entries by reduce key hash, then yields one
// rendered ReduceTree per reduce key, reading each key's results store (tree or
// nested section) via the concrete index type. Each store is disposed after rendering.
private static IEnumerable<ReduceTree> IterateTrees(Index self, FixedSizeTree mapEntries, Tree reducePhaseTree, FixedSizeTree typePerHash, TransactionOperationContext indexContext)
{
    var entriesPerReduceKeyHash = new Dictionary<ulong, List<MapEntry>>();
    foreach (var mapEntry in MapReduceIndexBase<MapReduceIndexDefinition>.GetMapEntries(mapEntries))
    {
        List<MapEntry> entries;
        if (entriesPerReduceKeyHash.TryGetValue(mapEntry.ReduceKeyHash, out entries) == false)
        {
            entriesPerReduceKeyHash[mapEntry.ReduceKeyHash] = entries = new List<MapEntry>();
        }
        entries.Add(mapEntry);
    }
    foreach (var item in entriesPerReduceKeyHash)
    {
        MapReduceResultsStore store;
        // The results store factory lives on the concrete index type.
        var mapReduceIndex = self as MapReduceIndex;
        if (mapReduceIndex != null)
        {
            store = mapReduceIndex.CreateResultsStore(typePerHash, item.Key, indexContext, false);
        }
        else
        {
            store = ((AutoMapReduceIndex)self).CreateResultsStore(typePerHash, item.Key, indexContext, false);
        }
        using (store)
        {
            switch (store.Type)
            {
                case MapResultsStorageType.Tree:
                    yield return (RenderTree(store.Tree, item.Value, mapEntries.Name.ToString(), self, indexContext));
                    break;
                case MapResultsStorageType.Nested:
                    yield return (RenderNestedSection(store.GetNestedResultsSection(reducePhaseTree), item.Value, mapEntries.Name.ToString(), self, indexContext));
                    break;
                default:
                    throw new ArgumentOutOfRangeException(store.Type.ToString());
            }
        }
    }
}
// Debug visualization (multi-tree overload): gathers the distinct reduce key hashes
// across all map-entry trees, mapping each map-entry id to its source tree name,
// then yields one rendered ReduceTree per reduce key. Rendered trees are registered
// with `scope` so their disposal outlives this iterator.
private static IEnumerable<ReduceTree> IterateTrees(Index self, List<FixedSizeTree> mapEntries, Tree reducePhaseTree, FixedSizeTree typePerHash, TransactionOperationContext indexContext, DisposableScope scope)
{
    var reduceKeys = new HashSet<ulong>();
    var idToDocIdHash = new Dictionary<long, string>();
    foreach (var tree in mapEntries)
    {
        foreach (var mapEntry in MapReduceIndexBase<MapReduceIndexDefinition, IndexField>.GetMapEntries(tree))
        {
            reduceKeys.Add(mapEntry.ReduceKeyHash);
            idToDocIdHash[mapEntry.Id] = tree.Name.ToString();
        }
    }
    foreach (var reduceKeyHash in reduceKeys)
    {
        MapReduceResultsStore store;
        // The results store factory lives on the concrete index type.
        var mapReduceIndex = self as MapReduceIndex;
        if (mapReduceIndex != null)
        {
            store = mapReduceIndex.CreateResultsStore(typePerHash, reduceKeyHash, indexContext, false);
        }
        else
        {
            store = ((AutoMapReduceIndex)self).CreateResultsStore(typePerHash, reduceKeyHash, indexContext, false);
        }
        using (store)
        {
            ReduceTree tree;
            switch (store.Type)
            {
                case MapResultsStorageType.Tree:
                    tree = RenderTree(store.Tree, reduceKeyHash, idToDocIdHash, self, indexContext);
                    break;
                case MapResultsStorageType.Nested:
                    tree = RenderNestedSection(store.GetNestedResultsSection(reducePhaseTree), reduceKeyHash, idToDocIdHash, self, indexContext);
                    break;
                default:
                    throw new ArgumentOutOfRangeException(store.Type.ToString());
            }
            scope.EnsureDispose(tree);
            yield return (tree);
        }
    }
}
// Releases the per-document map entries tree and resets all per-transaction
// bookkeeping so this context can be reused.
public void Dispose()
{
    DocumentMapEntries?.Dispose();
    DocumentMapEntries = null;
    // Trees are transaction-scoped; just drop the references.
    MapPhaseTree = null;
    ReducePhaseTree = null;
    // Clear tracking collections for the next batch.
    ProcessedDocEtags.Clear();
    ProcessedTombstoneEtags.Clear();
    StoreByReduceKeyHash.Clear();
    FreedPages.Clear();
}
// Persists the next map result id, then releases the per-document map entries tree
// and resets all per-transaction bookkeeping so this context can be reused.
public void Dispose()
{
    // Must run before tearing down state so the id survives this context.
    StoreNextMapResultId();
    DocumentMapEntries?.Dispose();
    DocumentMapEntries = null;
    // Trees are transaction-scoped; just drop the references.
    MapPhaseTree = null;
    ReducePhaseTree = null;
    PageModifiedInReduceTree = null;
    // Clear tracking collections for the next batch.
    ProcessedDocEtags.Clear();
    ProcessedTombstoneEtags.Clear();
    StoreByReduceKeyHash.Clear();
}
// Searches the current section's bitmap for `num` contiguous free pages. On success,
// removes those pages from the free space tree (deleting the whole section entry when
// it becomes fully used) and returns the absolute first page number via `page`.
private bool TryFindContinuousRange(LowLevelTransaction tx, FixedSizeTree freeSpaceTree, FixedSizeTree.IFixedSizeIterator it, int num, StreamBitArray current, long currentSectionId, out long? page)
{
    page = -1;
    var start = -1;
    var count = 0;
    // Scan for a run of `num` consecutive set bits (set = free page).
    for (int i = 0; i < NumberOfPagesInSection; i++)
    {
        if (current.Get(i))
        {
            if (start == -1)
            {
                start = i;
            }
            count++;
            if (count == num)
            {
                // Absolute page = section base + offset inside the section.
                page = currentSectionId * NumberOfPagesInSection + start;
                break;
            }
        }
        else
        {
            // Run broken - restart the search.
            start = -1;
            count = 0;
        }
    }
    if (count != num)
    {
        return (false);
    }
    if (current.SetCount == num)
    {
        // The section's free pages are exactly the ones taken - drop its entry.
        freeSpaceTree.Delete(it.CurrentKey);
    }
    else
    {
        // Clear the allocated bits and write the updated bitmap back.
        for (int i = 0; i < num; i++)
        {
            current.Set(i + start, false);
        }
        Slice val;
        using (current.ToSlice(tx.Allocator, out val))
            freeSpaceTree.Add(it.CurrentKey, val);
    }
    return (true);
}
// Decides whether retraining should happen based on the last (highest) etag in the
// tree. Returns false for an empty tree.
private bool ShouldRetrain(FixedSizeTree etagTree)
{
    using (var it = etagTree.Iterate())
    {
        if (it.SeekToLast() == false)
        {
            return (false);
        }

        long lastEtag = it.CurrentKey;
        // NOTE(review): `& 1024` tests only bit 10 of the etag, so this result
        // alternates between true and false in blocks of 1024 etags. If the intent
        // was "retrain once every 1024 etags", this should be `% 1024` - confirm.
        return ((lastEtag & 1024) == 0);
    }
}
/// <summary>
/// Asynchronously writes an HTML dump of the fixed size tree to the given stream:
/// a one-line summary followed by a collapsible tree view.
/// </summary>
public static Task DumpFixedSizedTreeToStreamAsync(LowLevelTransaction tx, FixedSizeTree tree, Stream stream)
{
    // Summary line emitted ahead of the rendered tree view.
    var headerData = $"{tree.Name} ({tree.Type}) {tree.NumberOfEntries} entries, depth: {tree.Depth}, {tree.PageCount} pages.";
    var output = new StreamWriter(stream);
    return WriteHtmlAsync(output, async w =>
    {
        await w.WriteLineAsync(headerData);
        await w.WriteLineAsync("<div class='css-treeview'><ul>");
        await DumpFixedSizeTreeToStreamAsync(tx, tree, w, tree.Name, tree.Parent);
        await w.WriteLineAsync("</ul></div>");
    });
}
/// <summary>
/// Writes an HTML dump of the fixed size tree to the given stream:
/// a one-line summary followed by a collapsible tree view.
/// </summary>
public static void DumpFixedSizedTreeToStream(LowLevelTransaction tx, FixedSizeTree tree, Stream stream)
{
    // Summary line emitted ahead of the rendered tree view.
    var headerData = $"{tree.Name} ({tree.Type}) {tree.NumberOfEntries} entries, depth: {tree.Depth}, {tree.PageCount} pages.";
    var output = new StreamWriter(stream);
    WriteHtml(output, w =>
    {
        w.WriteLine(headerData);
        w.WriteLine("<div class='css-treeview'><ul>");
        DumpFixedSizeTreeToStream(tx, tree, w, tree.Name, tree.Parent);
        w.WriteLine("</ul></div>");
    });
}
/// <summary>
/// Computes the fill ratio (used bytes / page size) of every page owned by the tree.
/// Leaf pages are measured by value size, branch pages by branch entry size.
/// </summary>
private List<double> GetPageDensities(FixedSizeTree tree)
{
    var pageSize = _tx.Environment.Options.PageSize;
    var densities = new List<double>();

    foreach (var pageNumber in tree.AllPages())
    {
        var fstPage = _tx.GetPage(pageNumber).ToFixedSizeTreePage();
        var entrySize = fstPage.IsLeaf ? fstPage.ValueSize : FixedSizeTree.BranchEntrySize;
        var sizeUsed = Constants.FixedSizeTreePageHeaderSize + (fstPage.NumberOfEntries * entrySize);
        densities.Add(((double)sizeUsed) / pageSize);
    }

    return densities;
}
// Regression test: exercises FixedSizeTree adds backed by a NewPageAllocator after
// the parent tree's pages have been deliberately fragmented and the allocator's
// section has been nearly exhausted. The exact add/delete/allocate sequence below
// sets up that page layout - do not reorder.
public void Invalid_usage_of_DirectAdds()
{
    var numberOfItems = 100;
    ushort valueSize = Constants.Storage.PageSize / 16;
    Slice fstName;
    using (Slice.From(Allocator, "ccc", out fstName))
    using (var tx = Env.WriteTransaction())
    {
        var parent = tx.CreateTree("parent");
        var allocator = new NewPageAllocator(tx.LowLevelTransaction, parent);
        allocator.Create();
        // Fragment the parent tree: add six ~1KB values...
        for (int i = 0; i < 6; i++)
        {
            parent.Add($"aaaaa-{i}", new byte[1000]);
        }
        parent.Add($"dummy-8", new byte[1300]);
        // ...then delete them, leaving free space behind the dummy entry.
        for (int i = 0; i < 6; i++)
        {
            parent.Delete($"aaaaa-{i}");
        }
        // Consume all but one page of the allocator's current section.
        for (int i = 0; i < NewPageAllocator.NumberOfPagesInSection - 1; i++)
        {
            allocator.AllocateSinglePage(0);
        }
        var fst = new FixedSizeTree(tx.LowLevelTransaction, parent, fstName, valueSize, newPageAllocator: allocator);
        var bytes = new byte[valueSize];
        Slice val;
        using (Slice.From(Allocator, bytes, out val))
        {
            // Force the fst to grow past the section boundary.
            for (var i = 0; i < numberOfItems; i++)
            {
                fst.Add(i, val);
            }
        }
        tx.Commit();
    }
}
// Returns the page numbers of every overflow page used by a stream's chunks. The
// last chunk additionally carries the StreamInfo header and tag, so its size is
// extended before computing its page count. Validates the accumulated chunk sizes
// against the stream's recorded total size.
internal List<long> GetStreamPages(FixedSizeTree chunksTree, StreamInfo* info)
{
    var pages = new List<long>();
    var chunkIndex = 0;
    using (var it = chunksTree.Iterate())
    {
        if (it.Seek(0) == false)
        {
            // No chunks recorded - nothing to collect.
            return (pages);
        }
        var totalSize = 0L;
        do
        {
            var chunk = (ChunkDetails*)it.CreateReaderForCurrent().Base;
            totalSize += chunk->ChunkSize;
            long size = chunk->ChunkSize;
            if (chunkIndex == chunksTree.NumberOfEntries - 1)
            {
                // stream info is put after the last chunk
                size += StreamInfo.SizeOf + info->TagSize;
            }
            var numberOfPages = VirtualPagerLegacyExtensions.GetNumberOfOverflowPages(size);
            for (int i = 0; i < numberOfPages; i++)
            {
                pages.Add(chunk->PageNumber + i);
            }
            chunkIndex++;
        } while (it.MoveNext());
        if (totalSize != info->TotalSize)
        {
            // Corruption guard: chunk sizes must add up to the stored total.
            ThrowStreamSizeMismatch(chunksTree.Name, totalSize, info);
        }
        return (pages);
    }
}
/// <summary>
/// Returns a cached fixed size tree rooted at the transaction's root objects,
/// creating and caching it on first request.
/// </summary>
public FixedSizeTree GetGlobalFixedSizeTree(Slice name, ushort valSize)
{
    if (_globalFixedSizeTree == null)
        _globalFixedSizeTree = new Dictionary<Slice, FixedSizeTree>(SliceComparer.Instance);

    FixedSizeTree tree;
    if (_globalFixedSizeTree.TryGetValue(name, out tree))
        return tree;

    tree = new FixedSizeTree(LowLevelTransaction, LowLevelTransaction.RootObjects, name, valSize);
    // Cache under the tree's own Name slice rather than the caller-supplied one.
    _globalFixedSizeTree[tree.Name] = tree;
    return tree;
}
/// <summary>
/// Clears a single bit in the bitmap stored under <paramref name="pageNumber"/>.
/// The entry must already exist; a freshly-created buffer indicates a bug.
/// </summary>
private unsafe void UnsetValue(FixedSizeTree fst, long pageNumber, int positionInBitmap)
{
    byte* ptr;
    bool isNew;
    using (fst.DirectAdd(pageNumber, out isNew, out ptr))
    {
        if (isNew)
            ThrowInvalidNewBuffer();

        // Clear the bit; equivalent to:
        // ptr[positionInBitmap / 8] &= (byte)~(1 << (positionInBitmap % 8))
        PtrBitVector.SetBitInPointer(ptr, positionInBitmap, false);
    }
}
// Copies every entry of a fixed size tree into a destination environment, batching
// writes into transactions capped at half the max scratch buffer size. Progress is
// reported roughly 33 times over the copy; onAllEntriesCopied fires once every entry
// has been committed.
private static void CopyFixedSizeTree(FixedSizeTree fst, Func<Transaction, FixedSizeTree> createDestinationTree, StorageEnvironment compactedEnv, TransactionPersistentContext context, Action<long> onEntriesCopiedProgress, Action onAllEntriesCopied, CancellationToken token)
{
    using (var it = fst.Iterate())
    {
        var copiedEntries = 0L;
        if (it.Seek(Int64.MinValue) == false)
        {
            return;
        }
        do
        {
            token.ThrowIfCancellationRequested();
            using (var txw = compactedEnv.WriteTransaction(context))
            {
                var snd = createDestinationTree(txw);
                var transactionSize = 0L;
                do
                {
                    token.ThrowIfCancellationRequested();
                    using (it.Value(out var val))
                        snd.Add(it.CurrentKey, val);
                    // Approximate cost per entry: value bytes plus the long key.
                    transactionSize += fst.ValueSize + sizeof(long);
                    copiedEntries++;
                    // Report progress ~33 times over the whole tree.
                    var reportRate = fst.NumberOfEntries / 33 + 1;
                    if (copiedEntries % reportRate == 0)
                    {
                        onEntriesCopiedProgress(copiedEntries);
                    }
                } while (transactionSize < compactedEnv.Options.MaxScratchBufferSize / 2 && it.MoveNext());
                txw.Commit();
            }
            compactedEnv.FlushLogToDataFile();
            if (fst.NumberOfEntries == copiedEntries)
            {
                onAllEntriesCopied();
            }
            // When the inner loop exits because the size cap was hit (MoveNext was
            // short-circuited), this outer MoveNext advances to the next entry;
            // when the inner loop exhausted the iterator, it terminates the copy.
        } while (it.MoveNext());
    }
}
/// <summary>
/// Returns a cached fixed size tree nested under this tree for the given string key,
/// creating and caching it on first request. Marks this tree as containing fixed
/// size trees.
/// </summary>
public FixedSizeTree FixedTreeFor(string key, byte valSize = 0)
{
    if (_fixedSizeTrees == null)
        _fixedSizeTrees = new Dictionary<string, FixedSizeTree>();

    FixedSizeTree fixedTree;
    if (!_fixedSizeTrees.TryGetValue(key, out fixedTree))
    {
        fixedTree = new FixedSizeTree(_tx, this, key, valSize);
        _fixedSizeTrees[key] = fixedTree;
    }

    State.Flags |= TreeFlags.FixedSizeTrees;
    return fixedTree;
}
/// <summary>
/// Opens the results store for a reduce key. The storage type is read from the
/// type-per-hash tree; a missing entry defaults to nested storage.
/// </summary>
internal unsafe MapReduceResultsStore CreateResultsStore(FixedSizeTree typePerHash, ulong reduceKeyHash, TransactionOperationContext indexContext, bool create)
{
    MapResultsStorageType type;
    using (typePerHash.Read((long)reduceKeyHash, out Slice read))
    {
        // The stored value is a single byte encoding the storage type.
        type = read.HasValue
            ? (MapResultsStorageType)(*read.CreateReader().Base)
            : MapResultsStorageType.Nested;
    }
    return new MapReduceResultsStore(reduceKeyHash, type, indexContext, MapReduceWorkContext, create);
}
/// <summary>
/// Returns a cached fixed size tree nested under this tree for the given string key,
/// creating it (with an immutable key slice) and caching it on first request.
/// Marks this tree as containing fixed size trees.
/// </summary>
public FixedSizeTree FixedTreeFor(string key, byte valSize = 0)
{
    if (_fixedSizeTrees == null)
        _fixedSizeTrees = new Dictionary<string, FixedSizeTree>();

    FixedSizeTree fixedTree;
    if (!_fixedSizeTrees.TryGetValue(key, out fixedTree))
    {
        // The tree needs an immutable slice backing its name.
        var keySlice = Slice.From(_llt.Allocator, key, ByteStringType.Immutable);
        fixedTree = new FixedSizeTree(_llt, this, keySlice, valSize);
        _fixedSizeTrees[key] = fixedTree;
    }

    State.Flags |= TreeFlags.FixedSizeTrees;
    return fixedTree;
}
/// <summary>
/// Returns a cached fixed size tree nested under this tree for the given slice key,
/// creating and caching it on first request. Marks this tree as containing fixed
/// size trees.
/// </summary>
public FixedSizeTree FixedTreeFor(Slice key, byte valSize = 0)
{
    if (_fixedSizeTrees == null)
        _fixedSizeTrees = new Dictionary<Slice, FixedSizeTree>(SliceComparer.Instance);

    FixedSizeTree fixedTree;
    if (!_fixedSizeTrees.TryGetValue(key, out fixedTree))
    {
        fixedTree = new FixedSizeTree(_llt, this, key, valSize);
        // Cache under the tree's own Name slice rather than the caller-supplied one.
        _fixedSizeTrees[fixedTree.Name] = fixedTree;
    }

    State.Flags |= TreeFlags.FixedSizeTrees;
    return fixedTree;
}