// Opens or creates a Voron storage environment: wires up the data pager,
// free space handling, header accessor, scratch buffers and write-ahead
// journal, then either bootstraps a fresh database or loads the existing
// one. On any failure the partially constructed environment is disposed
// before the exception is rethrown, so no resources leak.
public unsafe StorageEnvironment(StorageEnvironmentOptions options)
{
    try
    {
        _options = options;
        _dataPager = options.DataPager;
        _freeSpaceHandling = new FreeSpaceHandling();
        _sliceComparer = NativeMethods.memcmp; // raw byte-wise key comparison
        _headerAccessor = new HeaderAccessor(this);
        // Initialize() reports whether the header was just created (new db).
        var isNew = _headerAccessor.Initialize();

        _scratchBufferPool = new ScratchBufferPool(this);

        _journal = new WriteAheadJournal(this);

        if (isNew)
            CreateNewDatabase();
        else // existing db, let us load it
            LoadExistingDatabase();

        State.FreeSpaceRoot.Name = Constants.FreeSpaceTreeName;
        State.Root.Name = Constants.RootTreeName;

        Writer = new TransactionMergingWriter(this, _cancellationTokenSource.Token);

        // Background flushing of journal writes into the data file, unless
        // the caller opted into flushing manually.
        if (_options.ManualFlushing == false)
            _flushingTask = FlushWritesToDataFileAsync();
    }
    catch (Exception)
    {
        Dispose();
        throw;
    }
}
// Prepares a page split: captures the insertion context, switches the page
// at the top of the cursor to its writable copy (ModifyPage), then pops it
// so the cursor points at the parent chain. Statement order matters: the
// page is read from the cursor before Pop().
public PageSplitter(Transaction tx, Tree tree, SliceComparer cmp, Slice newKey, int len, long pageNumber, NodeFlags nodeType, ushort nodeVersion, Cursor cursor, TreeMutableState treeState)
{
    _tx = tx;
    _tree = tree;
    _cmp = cmp;
    _newKey = newKey;
    _len = len;
    _pageNumber = pageNumber;
    _nodeType = nodeType;
    _nodeVersion = nodeVersion;
    _cursor = cursor;
    _treeState = treeState;

    Page page = _cursor.Pages.First.Value;
    // Work against the writable copy of the page being split.
    _page = tx.ModifyPage(page.PageNumber, page);
    _cursor.Pop();
}
/// <summary>
/// Benchmark setup: creates the tree under test, pre-generates unique
/// random key/value pairs, sorts them by key and partitions them so that
/// every pair in _pairs[i] sorts strictly before every pair in _pairs[j]
/// whenever i &lt; j.
/// </summary>
public override void Setup()
{
    base.Setup();

    using (var tx = Env.WriteTransaction())
    {
        tx.CreateTree(TreeNameSlice);
        tx.Commit();
    }

    int pairCount = NumberOfTransactions * NumberOfRecordsPerTransaction;
    var allPairs = Utils.GenerateUniqueRandomSlicePairs(
        pairCount,
        KeyLength,
        RandomSeed == -1 ? null as int? : RandomSeed);

    // Sort by key only; values stay attached to their keys.
    allPairs.Sort((a, b) => SliceComparer.Compare(a.Item1, b.Item1));

    // Hand out consecutive sorted chunks, one per transaction.
    _pairs = new List<Tuple<Slice, Slice>>[NumberOfTransactions];
    for (var txIndex = 0; txIndex < NumberOfTransactions; txIndex++)
    {
        _pairs[txIndex] = allPairs.GetRange(0, NumberOfRecordsPerTransaction);
        allPairs.RemoveRange(0, NumberOfRecordsPerTransaction);
    }
}
// Returns an iterator over the values stored under a multi-value key.
// Small value sets live inline as a nested page inside the node's data;
// large sets are promoted to a dedicated multi-value tree
// (TreeNodeFlags.MultiValuePageRef) - both cases are handled here.
public IIterator MultiRead(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);
    if (page == null || page.LastMatch != 0)
    {
        return (new EmptyIterator());
    }

    Debug.Assert(node != null);

    Slice fetchedNodeKey;
    using (TreeNodeHeader.ToSlicePtr(_llt.Allocator, node, out fetchedNodeKey))
    {
        // Defensive check: FindPageFor claimed an exact match, so the node's
        // key must equal the requested key or the tree is corrupted.
        if (SliceComparer.Equals(fetchedNodeKey, key) == false)
        {
            VoronUnrecoverableErrorException.Raise(_llt.Environment, "Was unable to retrieve the correct node. Data corruption possible");
        }
    }

    if (node->Flags == TreeNodeFlags.MultiValuePageRef)
    {
        // Values overflowed into their own tree; iterate that tree.
        var tree = OpenMultiValueTree(key, node);

        return (tree.Iterate(true));
    }

    // Values are stored inline as a nested page inside this node's data.
    var ptr = DirectAccessFromHeader(node);
    var nestedPage = new TreePage(ptr, (ushort)GetDataSize(node));

    return (new TreePageIterator(_llt, key, this, nestedPage));
}
/// <summary>
/// Asserts that <paramref name="actual"/> matches this index definition
/// (same name, start index and global flag); throws on the first mismatch.
/// </summary>
public void Validate(FixedSizeSchemaIndexDef actual)
{
    if (actual == null)
        throw new ArgumentNullException(nameof(actual), "Expected an index but received null");

    if (SliceComparer.Equals(Name, actual.Name) == false)
        throw new ArgumentException(
            $"Expected index to have Name='{Name}', got Name='{actual.Name}' instead",
            nameof(actual));

    if (StartIndex != actual.StartIndex)
        throw new ArgumentException(
            $"Expected index {Name} to have StartIndex='{StartIndex}', got StartIndex='{actual.StartIndex}' instead",
            nameof(actual));

    if (IsGlobal != actual.IsGlobal)
        throw new ArgumentException(
            $"Expected index {Name} to have IsGlobal='{IsGlobal}', got IsGlobal='{actual.IsGlobal}' instead",
            nameof(actual));
}
// Returns the size of the value stored under |key|, or -1 when the key is
// not present in the tree.
public int GetDataSize(Slice key)
{
    TreeNodeHeader *node;
    var p = FindPageFor(key, out node);
    // Consistency fix: sibling lookups (ReadVersion and the other
    // GetDataSize overload) also guard against FindPageFor returning null
    // before dereferencing LastMatch.
    if (p == null || p.LastMatch != 0)
    {
        return (-1);
    }

    if (node == null)
    {
        return (-1);
    }

    Slice nodeKey;
    using (TreeNodeHeader.ToSlicePtr(_llt.Allocator, node, out nodeKey))
    {
        // FindPageFor may land on a nearby key; require an exact match.
        if (!SliceComparer.EqualsInline(nodeKey, key))
        {
            return (-1);
        }
    }

    return (GetDataSize(node));
}
/// <summary>
/// Stores a single value under a multi-value key and verifies the exact
/// same bytes come back through MultiRead.
/// </summary>
public void Single_MultiAdd_And_Read_DataStored()
{
    var rnd = new Random();
    var payload = new byte[1000];
    rnd.NextBytes(payload);

    using (var tx = Env.WriteTransaction())
    {
        tx.CreateTree("foo");
        tx.Commit();
    }

    using (var tx = Env.WriteTransaction())
    {
        Slice value;
        Slice.From(Allocator, payload, out value);
        tx.ReadTree("foo").MultiAdd("ChildTreeKey", value);
        tx.Commit();
    }

    using (var tx = Env.ReadTransaction())
    {
        using (var iterator = tx.ReadTree("foo").MultiRead("ChildTreeKey"))
        {
            iterator.Seek(Slices.BeforeAllKeys);

            Slice expected;
            Slice.From(Allocator, payload, out expected);
            Assert.True(SliceComparer.Equals(iterator.CurrentKey, expected));
        }
    }
}
// Scans the page cache, starting at the current slot, for a cached page
// whose [FirstKey, LastKey] range covers |key|. Returns null when no
// cached page matches.
public FoundTreePage Find(Slice key)
{
    for (int i = 0; i < _cacheSize; i++)
    {
        var page = _cache[(_current + i) % _cacheSize];
        if (page == null)
            continue;

        var first = page.FirstKey;
        var last = page.LastKey;

        switch (key.Options)
        {
            case SliceOptions.Key:
                // The page covers the key when key >= first and key <= last
                // (a Before/AfterAllKeys boundary is open-ended on that side).
                if (first.Options != SliceOptions.BeforeAllKeys && SliceComparer.Compare(key, first) < 0)
                    break;
                if (last.Options != SliceOptions.AfterAllKeys && SliceComparer.Compare(key, last) > 0)
                    break;
                return page;

            case SliceOptions.BeforeAllKeys:
                if (first.Options == SliceOptions.BeforeAllKeys)
                    return page;
                break;

            case SliceOptions.AfterAllKeys:
                if (last.Options == SliceOptions.AfterAllKeys)
                    return page;
                break;

            default:
                throw new ArgumentException(key.Options.ToString());
        }
    }

    return null;
}
// Compares a plain slice |x| against a prefixed slice |y| (prefix stored
// once per page, referenced by PrefixId). When |x| carries a
// PrefixComparisonCache, the result of comparing x against y's prefix
// bytes is memoized per (PrefixId, PageNumber, length) so repeated
// binary-search probes on the same page skip re-comparing the prefix.
public static int Compare(Slice x, PrefixedSlice y, SliceComparer cmp, ushort size)
{
    fixed (byte* p1 = x.Array)
    fixed (byte* p2 = y.NonPrefixedData.Array)
    {
        // Managed arrays win over raw pointers when both are present.
        var xPtr = p1 != null ? p1 : x.Pointer;
        var yPtr = p2 != null ? p2 : y.NonPrefixedData.Pointer;

        // y carries no prefix at all - fall through to a plain comparison.
        if (y.Header.PrefixId == PrefixedSlice.NonPrefixedId)
            return Compare(null, 0, null, 0, xPtr, x.KeyLength, yPtr, y.Header.NonPrefixedDataSize, cmp, size);

        if (x.PrefixComparisonCache == null)
        {
            // No cache available: compare prefix (if any) plus remainder in one go.
            if (y.Prefix == null)
                return Compare(null, 0, null, 0, xPtr, x.KeyLength, yPtr, y.Header.NonPrefixedDataSize, cmp, size);
            else if (y.Prefix.Value == null)
                return Compare(null, 0, y.Prefix.ValuePtr, y.Header.PrefixUsage, xPtr, x.KeyLength, yPtr, y.Header.NonPrefixedDataSize, cmp, size);
            else
            {
                fixed (byte* prefixVal = y.Prefix.Value)
                    return Compare(null, 0, prefixVal, y.Header.PrefixUsage, xPtr, x.KeyLength, yPtr, y.Header.NonPrefixedDataSize, cmp, size);
            }
        }

        var prefixBytesToCompare = Math.Min(y.Header.PrefixUsage, x.KeyLength);

        int r;
        // NOTE(review): y.Prefix.PageNumber is dereferenced here before the
        // null checks below - presumably a cached comparison implies a loaded
        // prefix instance; confirm y.Prefix cannot be null on this path.
        if (x.PrefixComparisonCache.TryGetCachedResult(y.Header.PrefixId, y.Prefix.PageNumber, prefixBytesToCompare, out r) == false)
        {
            if (y.Prefix == null)
                r = Compare(null, 0, null, 0, xPtr, x.KeyLength, null, 0, cmp, prefixBytesToCompare);
            else if (y.Prefix.Value == null)
                r = Compare(null, 0, y.Prefix.ValuePtr, y.Header.PrefixUsage, xPtr, x.KeyLength, null, 0, cmp, prefixBytesToCompare);
            else
            {
                fixed (byte* prefixVal = y.Prefix.Value)
                    r = Compare(null, 0, prefixVal, y.Header.PrefixUsage, xPtr, x.KeyLength, null, 0, cmp, prefixBytesToCompare);
            }

            x.PrefixComparisonCache.SetPrefixComparisonResult(y.Header.PrefixId, y.Prefix.PageNumber, prefixBytesToCompare, r);
        }

        if (r != 0)
            return r;

        // Prefix bytes matched; compare what remains of both keys.
        size -= prefixBytesToCompare;
        return Compare(null, 0, null, 0, xPtr + prefixBytesToCompare, (ushort)(x.KeyLength - prefixBytesToCompare), yPtr, y.Header.NonPrefixedDataSize, cmp, size);
    }
}
// Captures the split context for the older (txInfo-based) page splitter
// and takes ownership of the page popped off the cursor.
public PageSplitter(Transaction tx, SliceComparer cmp, Slice newKey, int len, long pageNumber, Cursor cursor, TreeDataInTransaction txInfo)
{
    _cursor = cursor;
    _txInfo = txInfo;
    _tx = tx;
    _cmp = cmp;
    _newKey = newKey;
    _len = len;
    _pageNumber = pageNumber;

    // The page being split comes off the top of the cursor.
    _page = _cursor.Pop();
}
/// <summary>
/// Registers a fixed size index definition under its name.
/// The name must be present and non-empty.
/// </summary>
public TableSchema DefineFixedSizeIndex(FixedSizeSchemaIndexDef index)
{
    bool nameIsMissing = index.Name.HasValue == false || SliceComparer.Equals(Slices.Empty, index.Name);
    if (nameIsMissing)
        throw new ArgumentException("Fixed size index name must be non-empty", nameof(index));

    _fixedSizeIndexes[index.Name] = index;

    return this;
}
// Returns the length (in bytes) of the common prefix between this slice
// and |other|.
public ushort FindPrefixSize(MemorySlice other)
{
    _matchedBytes = 0;

    // Cache the delegate so repeated calls do not allocate a new one.
    if (_matchPrefixInstance == null)
        _matchPrefixInstance = MatchPrefix;

    // MatchPrefix records the match length into _matchedBytes as a side
    // effect of the comparison.
    CompareData(other, _matchPrefixInstance, Math.Min(KeyLength, other.KeyLength));

    return _matchedBytes;
}
// Debug-only invariant check for a tree page: a branch must hold at least
// two entries, keys must be strictly ascending, and no child page may be
// referenced twice. Renders the tree and throws on the first violation.
public void DebugValidate(Tree tree, long root)
{
    if (NumberOfEntries == 0)
    {
        return;
    }

#if VALIDATE
    if (Freed)
    {
        return;
    }
#endif

    if (IsBranch && NumberOfEntries < 2)
    {
        throw new InvalidOperationException("The branch page " + PageNumber + " has " + NumberOfEntries + " entry");
    }

    Slice prev;
    var prevScope = GetNodeKey(tree.Llt, 0, out prev);
    try
    {
        var pages = new HashSet <long>();
        for (int i = 1; i < NumberOfEntries; i++)
        {
            var node = GetNode(i);
            Slice current;
            var currentScope = GetNodeKey(tree.Llt, i, out current);

            // Keys must be strictly increasing within a page.
            if (SliceComparer.CompareInline(prev, current) >= 0)
            {
                DebugStuff.RenderAndShowTree(tree, root);
                throw new InvalidOperationException("The page " + PageNumber + " is not sorted");
            }

            if (node->Flags == (TreeNodeFlags.PageRef))
            {
                if (pages.Add(node->PageNumber) == false)
                {
                    DebugStuff.RenderAndShowTree(tree, root);
                    throw new InvalidOperationException("The page " + PageNumber + " references same page multiple times");
                }
            }

            // Release the previous key's allocation scope, then keep the
            // current one alive for the next iteration's comparison.
            prevScope.Dispose();
            prev = current;
            prevScope = currentScope;
        }
    }
    finally
    {
        prevScope.Dispose();
    }
}
// Asserts that two fixed size index definitions are equivalent
// (or that both are null).
private void FixedSchemaIndexDefEqual(TableSchema.FixedSizeSchemaIndexDef expectedIndex, TableSchema.FixedSizeSchemaIndexDef actualIndex)
{
    if (expectedIndex == null)
    {
        Assert.Equal(null, actualIndex);
        return;
    }

    Assert.Equal(expectedIndex.IsGlobal, actualIndex.IsGlobal);
    Assert.True(SliceComparer.Equals(expectedIndex.Name, actualIndex.Name));
    Assert.Equal(expectedIndex.StartIndex, actualIndex.StartIndex);
}
// Allocates a fresh single-leaf tree rooted at a brand new page and marks
// it as created inside the current write transaction.
public static Tree Create(Transaction tx, SliceComparer cmp, TreeFlags flags = TreeFlags.None)
{
    var rootPage = NewPage(tx, PageFlags.Leaf, 1);

    var tree = new Tree(cmp, rootPage.PageNumber)
    {
        _state =
        {
            Depth = 1,
            Flags = flags,
            InWriteTransaction = true
        }
    };

    tree.State.RecordNewPage(rootPage, 1);

    return tree;
}
// Rehydrates a tree handle from its persisted root header; the stored
// page statistics are copied into the in-memory state.
public static Tree Open(Transaction tx, SliceComparer cmp, TreeRootHeader *header)
{
    return (new Tree(cmp, header->RootPageNumber)
    {
        _state =
        {
            PageCount = header->PageCount,
            BranchPages = header->BranchPages,
            Depth = header->Depth,
            OverflowPages = header->OverflowPages,
            LeafPages = header->LeafPages,
            EntriesCount = header->EntriesCount,
            Flags = header->Flags
        }
    });
}
// Creates a new single-leaf tree and records its root page in the
// transaction's per-tree bookkeeping.
public static Tree Create(Transaction tx, SliceComparer cmp, TreeFlags flags = TreeFlags.None)
{
    var rootPage = NewPage(tx, PageFlags.Leaf, 1);

    var tree = new Tree(cmp, rootPage.PageNumber)
    {
        _state =
        {
            Depth = 1,
            Flags = flags
        }
    };

    var treeInfo = tx.GetTreeInformation(tree);
    treeInfo.RecordNewPage(rootPage, 1);

    return tree;
}
// Builds a brand new tree: one leaf page, depth 1, flagged as being
// created inside the current write transaction.
public static Tree Create(Transaction tx, SliceComparer cmp, TreeFlags flags = TreeFlags.None)
{
    var leaf = NewPage(tx, PageFlags.Leaf, 1);
    var tree = new Tree(cmp, leaf.PageNumber)
    {
        _state = { Depth = 1, Flags = flags, InWriteTransaction = true }
    };
    tree.State.RecordNewPage(leaf, 1);
    return tree;
}
// Returns the stored value size for |key|, or -1 when the key is absent
// or the lookup did not land on an exact match.
public int GetDataSize(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);

    if (page == null || page.LastMatch != 0)
        return -1;

    if (node == null)
        return -1;

    if (SliceComparer.EqualsInline(TreeNodeHeader.ToSlicePtr(_llt.Allocator, node), key) == false)
        return -1;

    return TreeNodeHeader.GetDataSize(_llt, node);
}
// Rehydrates a tree handle from its persisted root header.
// InWriteTransaction mirrors whether the opening transaction is
// read-write.
public static Tree Open(Transaction tx, SliceComparer cmp, TreeRootHeader* header)
{
    return new Tree(cmp, header->RootPageNumber)
    {
        _state =
        {
            PageCount = header->PageCount,
            BranchPages = header->BranchPages,
            Depth = header->Depth,
            OverflowPages = header->OverflowPages,
            LeafPages = header->LeafPages,
            EntriesCount = header->EntriesCount,
            Flags = header->Flags,
            InWriteTransaction = tx.Flags.HasFlag(TransactionFlags.ReadWrite)
        }
    };
}
// Returns the version stamp of the entry stored under |key|, or 0 when
// the key does not exist (or the lookup was not an exact match).
public ushort ReadVersion(Slice key)
{
    TreeNodeHeader *node;
    var page = FindPageFor(key, out node);

    if (page == null || page.LastMatch != 0)
        return 0;

    if (node == null)
        return 0;

    if (SliceComparer.EqualsInline(TreeNodeHeader.ToSlicePtr(_llt.Allocator, node), key) == false)
        return 0;

    return node->Version;
}
// Two (tree, key) pairs are equal when they reference the same tree
// instance and carry byte-wise equal keys.
public bool Equals(Tuple <Tree, Slice> x, Tuple <Tree, Slice> y)
{
    if (x == null)
        return y == null;

    if (y == null)
        return false;

    if (x.Item1 != y.Item1)
        return false;

    return SliceComparer.Equals(x.Item2, y.Item2);
}
/// <summary>
/// Deletes the tree named <paramref name="name"/>: frees all of its pages,
/// removes its root object, and evicts any cached multi-value trees and
/// handles referencing it. No-op when the tree does not exist.
/// </summary>
public void DeleteTree(Slice name)
{
    // Bug fix: the previous message ("Cannot create a new newRootTree with
    // a read only transaction") was copy-pasted from tree creation and was
    // misleading for a delete. The condition is also spelled directly.
    if (_lowLevelTransaction.Flags != TransactionFlags.ReadWrite)
    {
        throw new ArgumentException("Cannot delete a tree with a read only transaction");
    }

    Tree tree = ReadTree(name);
    if (tree == null)
    {
        return;
    }

    // Return every page owned by the tree to free space handling.
    foreach (var page in tree.AllPages())
    {
        _lowLevelTransaction.FreePage(page);
    }

    _lowLevelTransaction.RootObjects.Delete(name);

    if (_multiValueTrees != null)
    {
        // Collect first, then remove, to avoid mutating the dictionary
        // while enumerating it.
        var toRemove = new List <Tuple <Tree, Slice> >();

        foreach (var valueTree in _multiValueTrees)
        {
            var multiTree = valueTree.Key.Item1;

            if (SliceComparer.Equals(multiTree.Name, name))
            {
                toRemove.Add(valueTree.Key);
            }
        }

        foreach (var recordToRemove in toRemove)
        {
            _multiValueTrees.Remove(recordToRemove);
        }
    }
    // already created in ReadTree
    _trees.Remove(name);
}
// Test helper: looks up |key| in |tree| and returns (storedKey, value)
// slices pointing directly into the node, or null when there is no exact
// match.
protected unsafe Tuple <Slice, Slice> ReadKey(Transaction txh, Tree tree, Slice key)
{
    tree.FindPageFor(key, out var node);

    if (node == null)
    {
        return (null);
    }

    TreeNodeHeader.ToSlicePtr(txh.Allocator, node, out var item1);

    // FindPageFor may land on a nearby node; require an exact key match.
    if (SliceComparer.CompareInline(item1, key) != 0)
    {
        return (null);
    }

    // The value bytes start right after the node header and the key.
    Slice.External(txh.Allocator, (byte *)node + node->KeySize + Constants.Tree.NodeHeaderSize, (ushort)node->DataSize, ByteStringType.Immutable, out var item2);

    return (Tuple.Create(item1, item2));
}
// Checks iterator constraints for the key held by |node|: the key must
// start with RequiredPrefix (when set) and sort strictly below MaxKey
// (when set).
public unsafe static bool ValidateCurrentKey <T>(this T self, LowLevelTransaction tx, TreeNodeHeader *node) where T : IIterator
{
    if (self.RequiredPrefix.HasValue)
    {
        var key = TreeNodeHeader.ToSlicePtr(tx.Allocator, node);
        if (SliceComparer.StartWith(key, self.RequiredPrefix) == false)
            return false;
    }

    if (self.MaxKey.HasValue)
    {
        var key = TreeNodeHeader.ToSlicePtr(tx.Allocator, node);
        if (SliceComparer.CompareInline(key, self.MaxKey) >= 0)
            return false;
    }

    return true;
}
// Renames a tree: re-persists its root header under the new name and
// rewires the transaction's tree cache. Throws when the target name is
// reserved or already taken, or when the source tree does not exist.
public void RenameTree(Slice fromName, Slice toName)
{
    if (_lowLevelTransaction.Flags == TransactionFlags.ReadWrite == false)
    {
        throw new ArgumentException("Cannot rename a new tree with a read only transaction");
    }

    if (SliceComparer.Equals(toName, Constants.RootTreeNameSlice))
    {
        throw new InvalidOperationException("Cannot create a tree with reserved name: " + toName);
    }

    if (ReadTree(toName) != null)
    {
        throw new ArgumentException("Cannot rename a tree with the name of an existing tree: " + toName);
    }

    Tree fromTree = ReadTree(fromName);
    if (fromTree == null)
    {
        throw new ArgumentException("Tree " + fromName + " does not exists");
    }

    _lowLevelTransaction.RootObjects.Delete(fromName);

    // Persist the tree's current root header under the new name.
    byte *ptr;
    using (_lowLevelTransaction.RootObjects.DirectAdd(toName, sizeof(TreeRootHeader), out ptr))
        fromTree.State.CopyTo((TreeRootHeader *)ptr);

    fromTree.Rename(toName);
    fromTree.State.IsModified = true;

    // _trees was already populated by the ReadTree calls above; rewire the
    // cache entries to the new name.
    _trees.Remove(fromName);
    _trees.Remove(toName);
    AddTree(toName, fromTree);
}
// Searches the decompression cache, starting at the current slot, for a
// read-usage page whose key range contains |key|. Returns false when no
// suitable page is cached.
public bool TryFindPageForReading(Slice key, LowLevelTransaction tx, out DecompressedLeafPage result)
{
    Debug.Assert(key.Options == SliceOptions.Key);

    for (var i = 0; i < Size; i++)
    {
        var page = _cache[(_current + i) % Size];

        // decompressed page can has 0 entries if each compressed entry had a tombstone marker
        if (page == null || page.Usage != DecompressionUsage.Read || page.NumberOfEntries == 0)
            continue;

        Slice first;
        Slice last;
        using (page.GetNodeKey(tx, 0, out first))
        using (page.GetNodeKey(tx, page.NumberOfEntries - 1, out last))
        {
            if (SliceComparer.Compare(key, first) >= 0 && SliceComparer.Compare(key, last) <= 0)
            {
                result = page;
                return true;
            }
        }
    }

    result = null;
    return false;
}
/// <summary>
/// Benchmark setup: creates the table, generates unique random key/value
/// pairs, sorts them by key and distributes them into per-transaction
/// batches so that _valueBuilders[i][k] &lt; _valueBuilders[j][m] iff i &lt; j.
/// </summary>
public override void Setup()
{
    base.Setup();

    using (var tx = Env.WriteTransaction())
    {
        Schema.Create(tx, TableNameSlice, 16);
        tx.Commit();
    }

    var totalPairs = Utils.GenerateUniqueRandomSlicePairs(
        NumberOfTransactions * NumberOfRecordsPerTransaction,
        KeyLength,
        RandomSeed == -1 ? null as int? : RandomSeed);

    // This will sort just the KEYS
    totalPairs.Sort((x, y) => SliceComparer.Compare(x.Item1, y.Item1));

    // Distribute keys in such a way that _valueBuilders[i][k] <
    // _valueBuilders[j][m] iff i < j, for all k and m.
    _valueBuilders = new List <TableValueBuilder> [NumberOfTransactions];

    for (int i = 0; i < NumberOfTransactions; i++)
    {
        // Bug fix: Take() is deferred; enumerating it AFTER RemoveRange used
        // to skip the first batch and leave the last batch empty. Materialize
        // the batch before removing it from the source list (the sibling
        // tree-benchmark Setup already does this via .ToList()).
        var values = totalPairs.Take(NumberOfRecordsPerTransaction).ToList();
        totalPairs.RemoveRange(0, NumberOfRecordsPerTransaction);

        _valueBuilders[i] = new List <TableValueBuilder>();

        foreach (var pair in values)
        {
            _valueBuilders[i].Add(new TableValueBuilder
            {
                pair.Item1,
                pair.Item2
            });
        }
    }
}
// Debug-only page invariant check: branches need at least two entries,
// keys must be strictly ascending, and no child page may be referenced
// more than once. Renders the tree and throws on the first violation.
public void DebugValidate(LowLevelTransaction tx, long root)
{
    if (NumberOfEntries == 0)
        return;

    if (IsBranch && NumberOfEntries < 2)
        throw new InvalidOperationException("The branch page " + PageNumber + " has " + NumberOfEntries + " entry");

    var referencedPages = new HashSet<long>();
    var previousKey = GetNodeKey(tx, 0);

    for (int i = 1; i < NumberOfEntries; i++)
    {
        var node = GetNode(i);
        var currentKey = GetNodeKey(tx, i);

        if (SliceComparer.CompareInline(previousKey, currentKey) >= 0)
        {
            DebugStuff.RenderAndShowTree(tx, root);
            throw new InvalidOperationException("The page " + PageNumber + " is not sorted");
        }

        if (node->Flags == TreeNodeFlags.PageRef && referencedPages.Add(node->PageNumber) == false)
        {
            DebugStuff.RenderAndShowTree(tx, root);
            throw new InvalidOperationException("The page " + PageNumber + " references same page multiple times");
        }

        previousKey = currentKey;
    }
}
/// <summary>
/// Defines the table's primary key. A global index must carry an explicit
/// name; an unnamed local key defaults to the shared PK name. The key must
/// consist of exactly one field.
/// </summary>
public TableSchema DefineKey(SchemaIndexDef index)
{
    var nameMissing = index.Name.HasValue == false || SliceComparer.Equals(Slices.Empty, index.Name);

    if (index.IsGlobal && nameMissing)
        throw new ArgumentException("Name must be non empty for global index as primary key", nameof(index));

    if (nameMissing)
        index.Name = PkSlice;

    if (index.Count > 1)
        throw new InvalidOperationException("Primary key must be a single field");

    _primaryKey = index;

    return this;
}
// Orders batch operations by key first, then by value; an operation
// without a value sorts after one that has a value.
public int CompareTo(BatchOperation other)
{
    var byKey = SliceComparer.CompareInline(Key, other.Key);
    if (byKey != 0)
        return byKey;

    if (ValueSlice == null)
        return other.ValueSlice == null ? 0 : 1;

    if (other.ValueSlice == null)
        return -1;

    return SliceComparer.CompareInline(ValueSlice, other.ValueSlice);
}
// Compares two prefixed slices byte-wise, pinning both non-prefixed parts
// and (when present) both prefix buffers; managed arrays take precedence
// over the raw pointers.
public static int Compare(PrefixedSlice x, PrefixedSlice y, SliceComparer cmp, ushort size)
{
    fixed (byte* p1 = x.NonPrefixedData.Array)
    fixed (byte* p2 = y.NonPrefixedData.Array)
    {
        var xPtr = p1 != null ? p1 : x.NonPrefixedData.Pointer;
        var yPtr = p2 != null ? p2 : y.NonPrefixedData.Pointer;

        byte* xPre = null;
        byte* yPre = null;

        // Pinning a null array yields a null pointer, so these fixed blocks
        // are safe even when a slice carries no prefix.
        fixed (byte* pre1 = x.Prefix != null ? x.Prefix.Value : null)
        fixed (byte* pre2 = y.Prefix != null ? y.Prefix.Value : null)
        {
            if (x.Prefix != null)
                xPre = pre1 != null ? pre1 : x.Prefix.ValuePtr;

            if (y.Prefix != null)
                yPre = pre2 != null ? pre2 : y.Prefix.ValuePtr;

            return Compare(xPre, x.Header.PrefixUsage, yPre, y.Header.PrefixUsage, xPtr, x.Header.NonPrefixedDataSize, yPtr, y.Header.NonPrefixedDataSize, cmp, size);
        }
    }
}
// Test helper (older variant): looks up |key| and returns the stored
// (key, value) slices, or null when the key is not an exact match.
protected unsafe Tuple <Slice, Slice> ReadKey(Transaction txh, Tree tree, Slice key)
{
    TreeNodeHeader *node;
    tree.FindPageFor(key, out node);

    if (node == null)
    {
        return (null);
    }

    Slice item1;
    TreeNodeHeader.ToSlicePtr(txh.Allocator, node, out item1);

    // FindPageFor may return the closest node; require an exact key match.
    if (!SliceComparer.Equals(item1, key))
    {
        return (null);
    }

    Slice item2;
    // The value starts immediately after the node header and the key bytes.
    Slice.External(txh.Allocator, (byte *)node + node->KeySize + Constants.Tree.NodeHeaderSize, (ushort)node->DataSize, out item2);

    return (Tuple.Create(item1, item2));
}
// Dispatches byte-wise comparison of this slice against another memory
// slice. For Slice vs Slice, pins whichever side is backed by a managed
// array; prefixed slices are delegated to SliceComparisonMethods.
protected override int CompareData(MemorySlice other, SliceComparer cmp, ushort size)
{
    var otherSlice = other as Slice;
    if (otherSlice != null)
    {
        if (Array != null)
        {
            fixed (byte* a = Array)
            {
                if (otherSlice.Array != null)
                {
                    fixed (byte* b = otherSlice.Array)
                    {
                        return cmp(a, b, size);
                    }
                }

                return cmp(a, otherSlice.Pointer, size);
            }
        }

        if (otherSlice.Array != null)
        {
            fixed (byte* b = otherSlice.Array)
            {
                return cmp(Pointer, b, size);
            }
        }

        return cmp(Pointer, otherSlice.Pointer, size);
    }

    var prefixedSlice = other as PrefixedSlice;
    if (prefixedSlice != null)
        return SliceComparisonMethods.Compare(this, prefixedSlice, cmp, size);

    throw new NotSupportedException("Cannot compare because of unknown slice type: " + other.GetType());
}
// Binds a tree handle to a transaction, a key comparer and an existing
// mutable state instance.
private Tree(Transaction tx, SliceComparer cmp, TreeMutableState state)
{
    _state = state;
    _cmp = cmp;
    _tx = tx;
}
// Compares two keys: byte-wise over the common length first; when those
// bytes are equal, the shorter key sorts before the longer one.
public int Compare(Slice other, SliceComparer cmp)
{
    Debug.Assert(Options == SliceOptions.Key);
    Debug.Assert(other.Options == SliceOptions.Key);

    var commonLength = Math.Min(Size, other.Size);
    var result = CompareData(other, cmp, commonLength);

    return result != 0 ? result : Size - other.Size;
}
// Compares this prefixed slice against another memory slice. Comparing
// against a plain Slice reuses the Slice-vs-prefixed routine with the
// result negated to preserve the operand order.
protected override int CompareData(MemorySlice other, SliceComparer cmp, ushort size)
{
    var asPrefixed = other as PrefixedSlice;
    if (asPrefixed != null)
        return SliceComparisonMethods.Compare(this, asPrefixed, cmp, size);

    var asSlice = other as Slice;
    if (asSlice != null)
        return SliceComparisonMethods.Compare(asSlice, this, cmp, size) * -1;

    throw new NotSupportedException("Cannot compare because of unknown slice type: " + other.GetType());
}
// Iterator positioned over exactly one node.
public SingleEntryIterator(SliceComparer cmp, NodeHeader* item, Transaction tx)
{
    _tx = tx;
    _item = item;
    _cmp = cmp;
}
// Binary-searches the page for |key|. Leaves LastSearchPosition at the
// index where the key is (or would be inserted) and LastMatch at the
// result of the final comparison (0 = exact hit). Returns the node at the
// resulting position, or null when the position is past the last entry.
public NodeHeader* Search(Slice key, SliceComparer cmp)
{
    if (NumberOfEntries == 0)
    {
        LastSearchPosition = 0;
        LastMatch = 1;
        return null;
    }

    // Explicit before/after sentinels short-circuit to the page's ends.
    if (key.Options == SliceOptions.BeforeAllKeys)
    {
        LastSearchPosition = 0;
        LastMatch = 1;
        return GetNode(0);
    }

    if (key.Options == SliceOptions.AfterAllKeys)
    {
        LastMatch = -1;
        LastSearchPosition = NumberOfEntries - 1;
        return GetNode(LastSearchPosition);
    }

    // Branch pages start at slot 1: slot 0 is the implicit "before all
    // keys" page reference.
    int low = IsLeaf ? 0 : 1;
    int high = NumberOfEntries - 1;
    int position = 0;

    var pageKey = new Slice(SliceOptions.Key);
    bool matched = false;
    NodeHeader* node = null;
    while (low <= high)
    {
        position = (low + high) >> 1;

        node = GetNode(position);
        pageKey.Set(node);

        LastMatch = key.Compare(pageKey, cmp);
        matched = true;
        if (LastMatch == 0)
            break;

        if (LastMatch > 0)
            low = position + 1;
        else
            high = position - 1;
    }

    if (matched == false)
    {
        // NOTE(review): this path is reached only when the loop never ran
        // (e.g. a branch page with a single entry, low=1 > high=0) - pageKey
        // was never Set here; confirm comparing against the fresh slice is
        // the intended behavior.
        LastMatch = key.Compare(pageKey, cmp);
    }

    if (LastMatch > 0) // found entry less than key
    {
        position++; // move to the smallest entry larger than the key
    }

    Debug.Assert(position < ushort.MaxValue);
    LastSearchPosition = position;

    if (position >= NumberOfEntries)
        return null;
    return GetNode(position);
}
// Splits a full page in half: picks a split index (biased by where the
// pending key will land, and adjusted for leaf pages), moves the upper
// entries into |rightPage|, wires a separator key into the parent, and
// finally inserts the pending key into whichever half it belongs to.
// Returns the write position for the new value, or null when the key was
// added as an implicit branch reference.
private byte *SplitPageInHalf(TreePage rightPage)
{
    bool toRight;

    var currentIndex = _page.LastSearchPosition;
    var splitIndex = _page.NumberOfEntries / 2;

    if (currentIndex <= splitIndex)
    {
        toRight = false;
    }
    else
    {
        toRight = true;

        // Bias the split so the right page does not end up with more
        // entries than the left one.
        var leftPageEntryCount = splitIndex;
        var rightPageEntryCount = _page.NumberOfEntries - leftPageEntryCount + 1;

        if (rightPageEntryCount > leftPageEntryCount)
        {
            splitIndex++;

            Debug.Assert(splitIndex < _page.NumberOfEntries);
        }
    }

    if (_page.IsLeaf)
    {
        splitIndex = AdjustSplitPosition(currentIndex, splitIndex, ref toRight);
    }

    Slice currentKey;
    using (_page.GetNodeKey(_tx, splitIndex, out currentKey))
    {
        Slice seperatorKey;
        if (toRight && splitIndex == currentIndex)
        {
            // The new key lands exactly at the split point: the smaller of
            // the two becomes the separator.
            seperatorKey = SliceComparer.Compare(currentKey, _newKey) < 0 ? currentKey : _newKey;
        }
        else
        {
            seperatorKey = currentKey;
        }

        var addedAsImplicitRef = false;
        var parentOfPage = _cursor.CurrentPage;
        TreePage parentOfRight;

        DecompressedLeafPage rightDecompressed = null;

        if (_pageDecompressed != null)
        {
            // splitting the decompressed page, let's allocate the page of the same size to ensure enough space
            rightDecompressed = _tx.Environment.DecompressionBuffers.GetPage(_tx, _pageDecompressed.PageSize, DecompressionUsage.Write, rightPage);
            rightPage = rightDecompressed;
        }

        using (rightDecompressed)
        {
            AddSeparatorToParentPage(rightPage.PageNumber, seperatorKey, out parentOfRight);

            if (_page.IsBranch && toRight && SliceComparer.EqualsInline(seperatorKey, _newKey))
            {
                // _newKey needs to be inserted as first key (BeforeAllKeys) to the right page, so we need to add it before we move entries from the current page
                AddNodeToPage(rightPage, 0, Slices.BeforeAllKeys);
                addedAsImplicitRef = true;
            }

            // move the actual entries from page to right page
            ushort nKeys = _page.NumberOfEntries;
            for (int i = splitIndex; i < nKeys; i++)
            {
                TreeNodeHeader *node = _page.GetNode(i);
                if (_page.IsBranch && rightPage.NumberOfEntries == 0)
                {
                    // The first entry of a branch page is the implicit
                    // "before all keys" reference.
                    rightPage.CopyNodeDataToEndOfPage(node, Slices.BeforeAllKeys);
                }
                else
                {
                    Slice instance;
                    using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, node, out instance))
                    {
                        rightPage.CopyNodeDataToEndOfPage(node, instance);
                    }
                }
            }

            if (rightDecompressed != null)
            {
                rightDecompressed.CopyToOriginal(_tx, defragRequired: false, wasModified: true);
                rightPage = rightDecompressed.Original;
            }
        }

        // Drop the moved entries from the left page.
        _page.Truncate(_tx, splitIndex);

        RecompressPageIfNeeded(wasModified: true);

        byte *pos;

        if (addedAsImplicitRef == false)
        {
            try
            {
                if (toRight && _cursor.CurrentPage.PageNumber != parentOfRight.PageNumber)
                {
                    // modify the cursor if we are going to insert to the right page
                    _cursor.Pop();
                    _cursor.Push(parentOfRight);
                }

                // actually insert the new key
                pos = InsertNewKey(toRight ? rightPage : _page);
            }
            catch (InvalidOperationException e)
            {
                if (
                    e.Message.StartsWith("The page is full and cannot add an entry", StringComparison.Ordinal) ==
                    false)
                {
                    throw;
                }

                // Enrich the "page full" failure with full split diagnostics.
                throw new InvalidOperationException(
                    GatherDetailedDebugInfo(rightPage, currentKey, seperatorKey, currentIndex, splitIndex,
                        toRight), e);
            }
        }
        else
        {
            pos = null;
            _cursor.Push(rightPage);
        }

        if (_page.IsBranch) // remove a branch that has only one entry, the page ref needs to be added to the parent of the current page
        {
            Debug.Assert(_page.NumberOfEntries > 0);
            Debug.Assert(rightPage.NumberOfEntries > 0);

            if (_page.NumberOfEntries == 1)
            {
                RemoveBranchWithOneEntry(_page, parentOfPage);
            }

            if (rightPage.NumberOfEntries == 1)
            {
                RemoveBranchWithOneEntry(rightPage, parentOfRight);
            }
        }

        return (pos);
    }
}
// Convenience wrapper around Search: returns the slot index where |key|
// is (or would be inserted); the match outcome is left in LastMatch.
public int NodePositionFor(Slice key, SliceComparer cmp)
{
    Search(key, cmp);
    return LastSearchPosition;
}
// Byte-wise comparison core for two slices: pins the managed backing
// array on whichever side has one, otherwise uses the raw pointer
// directly.
private int CompareData(Slice other, SliceComparer cmp, ushort size)
{
    if (_array != null)
    {
        fixed (byte* a = _array)
        {
            if (other._array != null)
            {
                fixed (byte* b = other._array)
                {
                    return cmp(a, b, size);
                }
            }

            return cmp(a, other._pointer, size);
        }
    }

    if (other._array != null)
    {
        fixed (byte* b = other._array)
        {
            return cmp(_pointer, b, size);
        }
    }

    return cmp(_pointer, other._pointer, size);
}
// Debug-only page check (legacy variant): keys must be strictly ascending
// and no child page may be referenced more than once.
public void DebugValidate(Transaction tx, SliceComparer comparer, long root)
{
    if (NumberOfEntries == 0)
        return;

    var seenPages = new HashSet<long>();
    var previous = new Slice(GetNode(0));

    for (int i = 1; i < NumberOfEntries; i++)
    {
        var node = GetNode(i);
        var current = new Slice(node);

        if (previous.Compare(current, comparer) >= 0)
        {
            DebugStuff.RenderAndShow(tx, root, 1);
            throw new InvalidOperationException("The page " + PageNumber + " is not sorted");
        }

        if (node->Flags == NodeFlags.PageRef && seenPages.Add(node->PageNumber) == false)
        {
            DebugStuff.RenderAndShow(tx, root, 1);
            throw new InvalidOperationException("The page " + PageNumber + " references same page multiple times");
        }

        previous = current;
    }
}
// Compares two keys that may each be split into (prefix, remainder) byte
// runs, without materializing the concatenation. |size| bounds the total
// number of bytes compared and is consumed as prefix/remainder chunks are
// matched. The branches cover which side has a prefix and which prefix is
// longer.
public static int Compare(byte* prefix_x, ushort prefix_x_len, byte* prefix_y, ushort prefix_y_len, byte* x, ushort x_len, byte* y, ushort y_len, SliceComparer cmp, ushort size)
{
    if (size == 0) // empty slice before all keys
        return 0;

    // Neither side is prefixed - straight byte comparison.
    if (prefix_x_len == 0 && prefix_y_len == 0)
        return cmp(x, y, size);

    ushort toCompare;

    if (prefix_x_len == 0)
    {
        // Only y is prefixed: match x against y's prefix, then the rest.
        toCompare = Math.Min(prefix_y_len, size);

        var r = cmp(x, prefix_y, toCompare);

        if (r != 0)
            return r;

        size -= toCompare;

        return cmp(x + prefix_y_len, y, size);
    }

    if (prefix_y_len == 0)
    {
        // Only x is prefixed: mirror of the case above.
        toCompare = Math.Min(prefix_x_len, size);

        var r = cmp(prefix_x, y, toCompare);

        if (r != 0)
            return r;

        size -= toCompare;

        return cmp(x, y + prefix_x_len, size);
    }

    if (prefix_x_len > prefix_y_len)
    {
        // Both prefixed, x's prefix longer: compare the shared prefix span,
        // then x's remaining prefix against y's data, then the remainders.
        var r = cmp(prefix_x, prefix_y, prefix_y_len);

        if (r != 0)
            return r;

        size -= prefix_y_len;

        toCompare = Math.Min((ushort)(prefix_x_len - prefix_y_len), size);

        r = cmp(prefix_x + prefix_y_len, y, toCompare);

        if (r != 0)
            return r;

        size -= toCompare;

        return cmp(x, y + toCompare, size);
    }
    else
    {
        // Both prefixed, y's prefix at least as long: symmetric case.
        var r = cmp(prefix_x, prefix_y, prefix_x_len);

        if (r != 0)
            return r;

        size -= prefix_x_len;

        toCompare = Math.Min((ushort)(prefix_y_len - prefix_x_len), size);

        r = cmp(x, prefix_y + prefix_x_len, toCompare);

        if (r != 0)
            return r;

        size -= toCompare;

        return cmp(x + toCompare, y, size);
    }
}
/// <summary>
/// Compares this slice's bytes against <paramref name="other"/> over the
/// first <paramref name="size"/> bytes using <paramref name="cmp"/>.
/// Implemented per concrete slice type (plain vs prefixed).
/// </summary>
protected abstract int CompareData(MemorySlice other, SliceComparer cmp, ushort size);
// True when this slice begins with all of |other|'s bytes.
public bool StartsWith(Slice other, SliceComparer cmp)
{
    if (other.Size > Size)
        return false;

    return CompareData(other, cmp, other.Size) == 0;
}
// Binds a tree handle to a transaction and comparer, rooted at |root|.
private Tree(Transaction tx, SliceComparer cmp, long root)
{
    _cmp = cmp;
    _tx = tx;
    _state.RootPageNumber = root;
}
// Creates a tree handle over an existing root page.
private Tree(SliceComparer cmp, long root)
{
    _state.RootPageNumber = root;
    _cmp = cmp;
}
// End-to-end compaction test: builds a table with |entries| random rows,
// deletes a random subset, compacts the environment into a new directory
// and verifies every surviving row (key and value) is preserved while the
// deleted rows are gone.
public unsafe void ShouldPreserveTables(int entries, int seed)
{
    // Create random docs to check everything is preserved
    using (var allocator = new ByteStringContext(SharedMultipleUseFlag.None))
    {
        var create = new Dictionary <Slice, long>();
        var delete = new List <Slice>();
        var r = new Random(seed);

        for (var i = 0; i < entries; i++)
        {
            Slice key;
            Slice.From(allocator, "test" + i, out key);

            create.Add(key, r.Next());

            // Roughly half of the entries get scheduled for deletion.
            if (r.NextDouble() < 0.5)
            {
                delete.Add(key);
            }
        }

        // Create the schema
        var schema = new TableSchema()
            .DefineKey(new TableSchema.SchemaIndexDef
            {
                StartIndex = 0,
                Count = 1,
                IsGlobal = false
            });

        using (var env = new StorageEnvironment(StorageEnvironmentOptions.ForPath(DataDir)))
        {
            // Create table in the environment
            using (var tx = env.WriteTransaction())
            {
                schema.Create(tx, "test", 16);
                var table = tx.OpenTable(schema, "test");

                foreach (var entry in create)
                {
                    var value = entry.Value;
                    table.Set(new TableValueBuilder
                    {
                        entry.Key,
                        value
                    });
                }

                tx.Commit();
            }

            using (var tx = env.ReadTransaction())
            {
                var table = tx.OpenTable(schema, "test");
                Assert.Equal(table.NumberOfEntries, entries);
            }

            // Delete some of the entries (this is so that compaction makes sense)
            using (var tx = env.WriteTransaction())
            {
                var table = tx.OpenTable(schema, "test");

                foreach (var entry in delete)
                {
                    table.DeleteByKey(entry);
                }

                tx.Commit();
            }
        }

        var compactedData = Path.Combine(DataDir, "Compacted");
        StorageCompaction.Execute(StorageEnvironmentOptions.ForPath(DataDir),
            (StorageEnvironmentOptions.DirectoryStorageEnvironmentOptions)StorageEnvironmentOptions.ForPath(compactedData));

        // Reopen the compacted copy and verify row-by-row.
        using (var compacted = new StorageEnvironment(StorageEnvironmentOptions.ForPath(compactedData)))
        {
            using (var tx = compacted.ReadTransaction())
            {
                var table = tx.OpenTable(schema, "test");

                foreach (var entry in create)
                {
                    TableValueReader reader;
                    var hasValue = table.ReadByKey(entry.Key, out reader);

                    if (delete.Contains(entry.Key))
                    {
                        // This key should not be here
                        Assert.False(hasValue);
                    }
                    else
                    {
                        // This key should be there
                        Assert.True(hasValue);

                        // Data should be the same
                        int size;
                        byte *ptr = reader.Read(0, out size);
                        Slice current;
                        using (Slice.External(allocator, ptr, size, out current))
                            Assert.True(SliceComparer.Equals(current, entry.Key));

                        ptr = reader.Read(1, out size);
                        Assert.Equal(entry.Value, *(long *)ptr);
                    }
                }

                tx.Commit();
            }
        }
    }
}
// Legacy iterator constraint check: the node's key must start with
// RequiredPrefix (when set) and sort strictly below MaxKey (when set).
public unsafe static bool ValidateCurrentKey(this IIterator self, NodeHeader* node, SliceComparer cmp)
{
    if (self.RequiredPrefix != null)
    {
        var key = new Slice(node);
        if (key.StartsWith(self.RequiredPrefix, cmp) == false)
            return false;
    }

    if (self.MaxKey != null)
    {
        var key = new Slice(node);
        if (key.Compare(self.MaxKey, cmp) >= 0)
            return false;
    }

    return true;
}
// Iterator over the entries of a single page.
public PageIterator(SliceComparer cmp, Page page)
{
    _page = page;
    _cmp = cmp;
}
// Replays uncompressed nodes (appended after the compressed portion of a
// page) onto the freshly decompressed page so it reflects the latest
// state: tombstone nodes delete entries, data/page-ref nodes upsert them.
private void HandleUncompressedNodes(DecompressedLeafPage decompressedPage, TreePage p, DecompressionUsage usage)
{
    int numberOfEntries = p.NumberOfEntries;

    for (var i = 0; i < numberOfEntries; i++)
    {
        var uncompressedNode = p.GetNode(i);

        Slice nodeKey;
        using (TreeNodeHeader.ToSlicePtr(_tx.Allocator, uncompressedNode, out nodeKey))
        {
            if (uncompressedNode->Flags == TreeNodeFlags.CompressionTombstone)
            {
                HandleTombstone(decompressedPage, nodeKey, usage);
                continue;
            }

            if (decompressedPage.HasSpaceFor(_llt, TreeSizeOf.NodeEntry(uncompressedNode)) == false)
            {
                throw new InvalidOperationException("Could not add uncompressed node to decompressed page");
            }

            int index;

            if (decompressedPage.NumberOfEntries > 0)
            {
                Slice lastKey;
                using (decompressedPage.GetNodeKey(_llt, decompressedPage.NumberOfEntries - 1, out lastKey))
                {
                    // optimization: it's very likely that uncompressed nodes have greater keys than compressed ones
                    // when we insert sequential keys
                    var cmp = SliceComparer.CompareInline(nodeKey, lastKey);

                    if (cmp > 0)
                    {
                        // Append after the current last entry.
                        index = decompressedPage.NumberOfEntries;
                    }
                    else
                    {
                        if (cmp == 0)
                        {
                            // update of the last entry, just decrement NumberOfEntries in the page and
                            // put it at the last position
                            index = decompressedPage.NumberOfEntries - 1;
                            decompressedPage.Lower -= Constants.Tree.NodeOffsetSize;
                        }
                        else
                        {
                            index = decompressedPage.NodePositionFor(_llt, nodeKey);

                            if (decompressedPage.LastMatch == 0) // update
                            {
                                decompressedPage.RemoveNode(index);

                                if (usage == DecompressionUsage.Write)
                                {
                                    State.NumberOfEntries--;
                                }
                            }
                        }
                    }
                }
            }
            else
            {
                // all uncompressed nodes were compression tombstones which deleted all entries from the decompressed page
                index = 0;
            }

            switch (uncompressedNode->Flags)
            {
                case TreeNodeFlags.PageRef:
                    decompressedPage.AddPageRefNode(index, nodeKey, uncompressedNode->PageNumber);
                    break;
                case TreeNodeFlags.Data:
                    var pos = decompressedPage.AddDataNode(index, nodeKey, uncompressedNode->DataSize);
                    var nodeValue = TreeNodeHeader.Reader(_llt, uncompressedNode);
                    Memory.Copy(pos, nodeValue.Base, nodeValue.Length);
                    break;
                case TreeNodeFlags.MultiValuePageRef:
                    throw new NotSupportedException("Multi trees do not support compression");
                default:
                    throw new NotSupportedException("Invalid node type to copye: " + uncompressedNode->Flags);
            }
        }
    }
}
// Wraps an existing mutable state in a tree handle.
private Tree(SliceComparer cmp, TreeMutableState state)
{
    _state = state;
    _cmp = cmp;
}
// Iterator over a whole tree within the given transaction.
public TreeIterator(Tree tree, Transaction tx, SliceComparer cmp)
{
    _cmp = cmp;
    _tx = tx;
    _tree = tree;
}