/// <summary>
/// Tries to satisfy a small allocation of <paramref name="num"/> pages by merging the free
/// run at the END of the current section with the free run at the START of the next section.
/// </summary>
/// <param name="tx">Transaction whose allocator is used to serialize the updated bitmaps.</param>
/// <param name="freeSpacetree">Fixed-size tree mapping section id -> free-pages bitmap.</param>
/// <param name="currentSectionId">Section whose tail free-range we try to extend.</param>
/// <param name="num">Total number of contiguous pages required.</param>
/// <param name="current">Bitmap of the current section (already read by the caller).</param>
/// <param name="result">First page number of the merged run, or -1 when no run was found.</param>
/// <returns>true when a merged run was found and both bitmaps were updated; false otherwise.</returns>
private static bool TryFindSmallValueMergingTwoSections(LowLevelTransaction tx, FixedSizeTree freeSpacetree, long currentSectionId, int num, StreamBitArray current, out long? result)
{
    result = -1;
    var currentEndRange = current.GetEndRangeCount();
    if (currentEndRange == 0)
        return false;

    var nextSectionId = currentSectionId + 1;
    var read = freeSpacetree.Read(nextSectionId);
    if (!read.HasValue)
        return false; // next section has no entry, so it has no free pages to merge with

    var next = new StreamBitArray(read.CreateReader());
    var nextRange = num - currentEndRange; // pages still needed from the start of the next section
    if (next.HasStartRangeCount(nextRange) == false)
        return false;

    if (next.SetCount == nextRange)
    {
        // The next section is fully consumed by this allocation; drop its entry.
        freeSpacetree.Delete(nextSectionId);
    }
    else
    {
        for (int i = 0; i < nextRange; i++)
        {
            next.Set(i, false);
        }

        // BUGFIX: use the scoped ToSlice overload and dispose it, matching the pattern
        // used by the other methods in this file; the single-argument overload leaked
        // the allocator scope backing the slice.
        Slice nextVal;
        using (next.ToSlice(tx.Allocator, out nextVal))
            freeSpacetree.Add(nextSectionId, nextVal);
    }

    if (current.SetCount == currentEndRange)
    {
        // The current section is fully consumed as well.
        freeSpacetree.Delete(currentSectionId);
    }
    else
    {
        // Clear the tail run of the current section (bits counted from the end).
        for (int i = 0; i < currentEndRange; i++)
        {
            current.Set(NumberOfPagesInSection - 1 - i, false);
        }

        Slice currentVal;
        using (current.ToSlice(tx.Allocator, out currentVal))
            freeSpacetree.Add(currentSectionId, currentVal);
    }

    // The run starts `currentEndRange` pages before the end of the current section.
    result = currentSectionId * NumberOfPagesInSection + (NumberOfPagesInSection - currentEndRange);
    return true;
}
/// <summary>
/// Scans the current section's bitmap for a run of <paramref name="num"/> consecutive free
/// pages; on success clears those bits, persists (or removes) the bitmap, and reports the
/// absolute page number of the run's first page.
/// </summary>
/// <param name="page">First page of the found run, or -1 when no run exists in this section.</param>
/// <returns>true when a run was found and the tree was updated; false otherwise.</returns>
private bool TryFindContinuousRange(LowLevelTransaction tx, FixedSizeTree freeSpaceTree, FixedSizeTree.IFixedSizeIterator it, int num, StreamBitArray current, long currentSectionId, out long? page)
{
    page = -1;

    // Locate a run of `num` consecutive set (free) bits.
    var runStart = -1;
    var runLength = 0;
    for (var bit = 0; bit < NumberOfPagesInSection; bit++)
    {
        if (current.Get(bit) == false)
        {
            // Run broken — reset and keep scanning.
            runStart = -1;
            runLength = 0;
            continue;
        }

        if (runStart == -1)
            runStart = bit;
        runLength++;

        if (runLength == num)
        {
            page = currentSectionId * NumberOfPagesInSection + runStart;
            break;
        }
    }

    if (runLength != num)
        return false;

    if (current.SetCount == num)
    {
        // Every free page in the section was consumed; remove its entry entirely.
        freeSpaceTree.Delete(it.CurrentKey);
    }
    else
    {
        // Clear only the allocated run, then persist the updated bitmap.
        for (var offset = 0; offset < num; offset++)
        {
            current.Set(runStart + offset, false);
        }

        Slice val;
        using (current.ToSlice(tx.Allocator, out val))
            freeSpaceTree.Add(it.CurrentKey, val);
    }

    return true;
}
/// <summary>
/// Records a finished chunk: stores its (page number, size) pair in the chunks tree
/// under the next sequential chunk index.
/// </summary>
/// <param name="pageNumber">First page of the chunk being flushed.</param>
/// <param name="chunkSize">Number of bytes written into the chunk.</param>
private void FlushPage(long pageNumber, int chunkSize)
{
    var details = new ChunkDetails
    {
        PageNumber = pageNumber,
        ChunkSize = chunkSize
    };

    // Wrap the stack-allocated struct in an external slice just long enough to add it.
    Slice value;
    using (Slice.External(_parent._tx.Allocator, (byte*)&details, ChunkDetails.SizeOf, out value))
    {
        _tree.Add(_chunkNumber++, value);
    }
}
/// <summary>
/// Records a chunk's (page number, size) pair in the chunks tree and mirrors the size
/// into the current page's stream header.
/// </summary>
/// <param name="pageNumber">First page of the chunk being recorded.</param>
/// <param name="chunkSize">Number of bytes written into the chunk.</param>
private void RecordChunkPage(long pageNumber, int chunkSize)
{
    var details = new ChunkDetails
    {
        PageNumber = pageNumber,
        ChunkSize = chunkSize
    };

    // Keep the on-page header in sync with the tree entry.
    ((StreamPageHeader*)_currentPage.Pointer)->ChunkSize = chunkSize;

    // Wrap the stack-allocated struct in an external slice just long enough to add it.
    Slice value;
    using (Slice.External(_parent._tx.Allocator, (byte*)&details, ChunkDetails.SizeOf, out value))
    {
        _tree.Add(_chunkNumber++, value);
    }
}
/// <summary>
/// Regression test: exercises a FixedSizeTree backed by a NewPageAllocator whose section
/// is nearly exhausted, then adds enough items to force further page allocations.
/// </summary>
public void Invalid_usage_of_DirectAdds()
{
    const int itemCount = 100;
    ushort valueSize = Constants.Storage.PageSize / 16;

    Slice treeName;
    using (Slice.From(Allocator, "ccc", out treeName))
    using (var tx = Env.WriteTransaction())
    {
        var parent = tx.CreateTree("parent");
        var allocator = new NewPageAllocator(tx.LowLevelTransaction, parent);
        allocator.Create();

        // Seed the parent tree, then punch holes by deleting the seeded entries.
        for (var i = 0; i < 6; i++)
        {
            parent.Add($"aaaaa-{i}", new byte[1000]);
        }

        parent.Add($"dummy-8", new byte[1300]);

        for (var i = 0; i < 6; i++)
        {
            parent.Delete($"aaaaa-{i}");
        }

        // Consume almost the whole allocator section up front.
        for (var i = 0; i < NewPageAllocator.NumberOfPagesInSection - 1; i++)
        {
            allocator.AllocateSinglePage(0);
        }

        var fst = new FixedSizeTree(tx.LowLevelTransaction, parent, treeName, valueSize, newPageAllocator: allocator);

        var payload = new byte[valueSize];
        Slice payloadSlice;
        using (Slice.From(Allocator, payload, out payloadSlice))
        {
            for (var i = 0; i < itemCount; i++)
            {
                fst.Add(i, payloadSlice);
            }
        }

        tx.Commit();
    }
}
/// <summary>
/// Searches for a large free range spanning whole sections: it needs
/// <c>num / NumberOfPagesInSection</c> fully-free consecutive sections, plus
/// <c>num % NumberOfPagesInSection</c> free pages at the start of the section that follows.
/// Returns the first page number of the range, or null when no such range exists.
/// </summary>
/// <param name="tx">Transaction whose allocator serializes updated bitmaps.</param>
/// <param name="freeSpaceTree">Fixed-size tree mapping section id -> free-pages bitmap.</param>
/// <param name="it">Iterator positioned on the first candidate section; advanced by this method.</param>
/// <param name="num">Total number of contiguous pages required.</param>
private long? TryFindLargeValue(LowLevelTransaction tx, FixedSizeTree freeSpaceTree, FixedSizeTree.IFixedSizeIterator it, int num)
{
    int numberOfNeededFullSections = num / NumberOfPagesInSection;
    int numberOfExtraBitsNeeded = num % NumberOfPagesInSection;
    var info = new FoundSectionsInfo();
    // NOTE: every `continue` below jumps to the do-while condition, i.e. it advances
    // the iterator via it.MoveNext() — the statement order here is load-bearing.
    do
    {
        var stream = it.CreateReaderForCurrent();
        {
            var current = new StreamBitArray(stream);
            var currentSectionId = it.CurrentKey;

            // Only fully-free sections can participate in the run of full sections.
            if (current.SetCount < NumberOfPagesInSection)
            {
                info.Clear();
                continue;
            }

            // Sections must be consecutive by id; a gap breaks the accumulated run.
            if (info.StartSectionId != null && currentSectionId != info.StartSectionId + info.Sections.Count)
            {
                info.Clear();
            }

            // Remember the first section of the (possibly restarted) sequence.
            if (info.StartSection == -1)
            {
                info.StartSection = it.CurrentKey;
                info.StartSectionId = currentSectionId;
            }

            info.Sections.Add(it.CurrentKey);

            // Keep accumulating until we have enough full sections.
            if (info.Sections.Count != numberOfNeededFullSections)
            {
                continue;
            }

            // We found enough full sections; if the request is an exact multiple of the
            // section size we are done — consume the sections and return.
            if (numberOfExtraBitsNeeded == 0)
            {
                foreach (var section in info.Sections)
                {
                    freeSpaceTree.Delete(section);
                }
                return (info.StartSectionId * NumberOfPagesInSection);
            }

            // Otherwise the remainder must come from the start of the very next section.
            StreamBitArray next;
            var nextSectionId = currentSectionId + 1;
            Slice read;
            using (freeSpaceTree.Read(nextSectionId, out read))
            {
                if (!read.HasValue)
                {
                    // The following section has no free-space entry — restart the search.
                    info.Clear();
                    continue;
                }
                next = new StreamBitArray(read.CreateReader());
            }
            if (next.HasStartRangeCount(numberOfExtraBitsNeeded) == false)
            {
                // Not enough free pages at the start of the next section.
                info.Clear();
                continue;
            }

            // Consume the leading bits of the next section.
            if (next.SetCount == numberOfExtraBitsNeeded)
            {
                // The next section is fully consumed; drop its entry.
                freeSpaceTree.Delete(nextSectionId);
            }
            else
            {
                for (int i = 0; i < numberOfExtraBitsNeeded; i++)
                {
                    next.Set(i, false);
                }
                Slice val;
                using (next.ToSlice(tx.Allocator, out val))
                    freeSpaceTree.Add(nextSectionId, val);
            }

            // Consume the accumulated full sections.
            foreach (var section in info.Sections)
            {
                freeSpaceTree.Delete(section);
            }
            return (info.StartSectionId * NumberOfPagesInSection);
        }
    } while (it.MoveNext());
    return (null);
}