public void TestReadOnly()
{
    using (TempFile file = new TempFile())
    {
        // Create a tree file containing three entries, then close it.
        var opt = new BPlusTree<int, int>.Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
        {
            CreateFile = CreatePolicy.Always,
            FileName = file.TempPath,
        };
        using (BPlusTree<int, int> tree = new BPlusTree<int, int>(opt))
        {
            tree.Add(1, 2);
            tree.Add(3, 4);
            tree.Add(5, 6);
        }

        // Reopen the same file as read-only: reads succeed...
        opt.CreateFile = CreatePolicy.Never;
        opt.ReadOnly = true;
        using (BPlusTree<int, int> tree = new BPlusTree<int, int>(opt))
        {
            Assert.AreEqual(tree[1], 2);
            Assert.AreEqual(tree[3], 4);
            Assert.AreEqual(tree[5], 6);

            // ...and any mutation must throw InvalidOperationException.
            try
            {
                tree[1] = 0;
                Assert.Fail();
            }
            catch (InvalidOperationException) { }

            try
            {
                tree.Remove(1);
                Assert.Fail();
            }
            catch (InvalidOperationException) { }
        }
    }
}
public void BPlusTreeDemo()
{
    // Configure a v2 tree persisted to a throw-away file.
    var options = new BPlusTree<string, DateTime>.OptionsV2(PrimitiveSerializer.String, PrimitiveSerializer.DateTime);
    options.CalcBTreeOrder(16, 24);
    options.CreateFile = CreatePolicy.Always;
    options.FileName = Path.GetTempFileName();

    // Pass 1: snapshot the last-write time of every file under the temp directory.
    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        foreach (var file in new DirectoryInfo(Path.GetTempPath()).GetFiles("*", SearchOption.AllDirectories))
            tree.Add(file.FullName, file.LastWriteTimeUtc);
    }

    // Pass 2: reopen the snapshot and diff it against the live directory.
    options.CreateFile = CreatePolicy.Never;
    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        foreach (var file in new DirectoryInfo(Path.GetTempPath()).GetFiles("*", SearchOption.AllDirectories))
        {
            DateTime recorded;
            if (!tree.TryGetValue(file.FullName, out recorded))
                Console.WriteLine("New file: {0}", file.FullName);
            else if (recorded != file.LastWriteTimeUtc)
                Console.WriteLine("Modified: {0}", file.FullName);
            tree.Remove(file.FullName);
        }

        // Whatever remains in the snapshot no longer exists on disk.
        foreach (var item in tree)
            Console.WriteLine("Removed: {0}", item.Key);
    }
}
static void Insert(BPlusTree<Guid, TestInfo> tree, IDictionary<Guid, TestInfo> testdata, int threads, int count, TimeSpan wait)
{
    // Fan generated key sets out to a pool of workers that bulk-add them to the tree.
    using (var work = new WorkQueue<IEnumerable<KeyValuePair<Guid, TestInfo>>>(tree.AddRange, threads))
    {
        foreach (var set in TestInfo.CreateSets(threads, count, testdata))
            work.Enqueue(set);

        // Translate the TimeSpan into the millisecond timeout Complete() expects,
        // mapping TimeSpan.MaxValue to an infinite wait.
        int timeoutMs = wait == TimeSpan.MaxValue
            ? Timeout.Infinite
            : (int)Math.Min(int.MaxValue, wait.TotalMilliseconds);
        work.Complete(false, timeoutMs);
    }
}
// key, data, added, last_used
/// <summary>
/// Initialize the cache.
/// </summary>
/// <param name="capacity">Maximum number of entries.</param>
/// <param name="evictCount">Number to evict when capacity is reached.</param>
/// <param name="debug">Enable or disable console debugging.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="evictCount"/> exceeds <paramref name="capacity"/>.</exception>
public LRUCacheBTree(int capacity, int evictCount, bool debug)
{
    // Fail fast on invalid arguments before allocating the backing store
    // (the original checked only after constructing the tree).
    if (evictCount > capacity)
    {
        throw new ArgumentException("Evict count must be less than or equal to capacity.");
    }

    Capacity = capacity;
    EvictCount = evictCount;
    Debug = debug;
    Cache = new BPlusTree<string, Tuple<object, DateTime, DateTime>>();
    Cache.EnableCount();
}
BPlusTree<Guid, TestInfo>.OptionsV2 GetOptions(TempFile temp)
{
    // Size the B-tree order from the marshalled sizes of key and value.
    var options = new BPlusTree<Guid, TestInfo>.OptionsV2(
        PrimitiveSerializer.Guid, new TestInfoSerializer());
    options.CalcBTreeOrder(Marshal.SizeOf(typeof(Guid)), Marshal.SizeOf(typeof(TestInfo)));
    options.CreateFile = CreatePolicy.IfNeeded;
    options.FileName = temp.TempPath;

    // Enable automatic commit/recovery: call-level locking plus a
    // transaction log stored alongside the data file.
    options.CallLevelLock = new ReaderWriterLocking();
    options.TransactionLogFileName = Path.ChangeExtension(options.FileName, ".tlog");
    return options;
}
public void TestICloneable()
{
    // Clone through the ICloneable interface and verify settings survive the copy.
    ICloneable opt = new BPlusTree<int, int>.Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
    {
        CreateFile = CreatePolicy.IfNeeded,
        BTreeOrder = 4
    };

    var options = (BPlusTree<int, int>.Options)opt.Clone();
    Assert.AreEqual(CreatePolicy.IfNeeded, options.CreateFile);
    // Setting BTreeOrder = 4 populates both node maximums.
    Assert.AreEqual(4, options.MaximumChildNodes);
    Assert.AreEqual(4, options.MaximumValueNodes);
}
// Verifies that a read-only copy of a tree file observes only COMMITTED state:
// it sees nothing before the first Commit(), and after later uncommitted edits
// it still reflects the last committed snapshot.
public void TestReadOnlyCopy()
{
    using (var tempFile = new TempFile())
    {
        var options = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
        }.CalcBTreeOrder(4, 10);

        // Second options instance opens the same file read-only.
        var readcopy = options.Clone();
        readcopy.CreateFile = CreatePolicy.Never;
        readcopy.ReadOnly = true;

        using (var tree = new BPlusTree<int, string>(options))
        {
            // A read-only copy opened before any writes sees an empty tree.
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }

            //insert some data...
            tree.AddRange(MakeValues(0, 100));
            // Uncommitted writes are invisible to a fresh read-only copy.
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }
            tree.Commit();

            // Remove the committed keys and stage new, uncommitted ones...
            for (int i = 0; i < 100; i++)
                tree.Remove(i);
            tree.AddRange(MakeValues(1000, 1000));

            // The copy still reflects only the committed snapshot: keys 0..99.
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(100, copy.Count);
                Assert.AreEqual(0, copy.First().Key);
                Assert.AreEqual(99, copy.Last().Key);
            }
            tree.Commit();
        }
    }
}
void AddRandomKeys(int count, Dictionary<int, string> keys, BPlusTree<int, string> data)
{
    var timer = Stopwatch.StartNew();
    for (int i = 0; i < count; i++)
    {
        int candidate = Random.Next(int.MaxValue);
        // Mirror every successful tree insert into the verification dictionary.
        if (data.TryAdd(candidate, candidate.ToString()))
            keys.Add(candidate, candidate.ToString());
    }
    Trace.TraceInformation("Added {0} in {1}", count, timer.ElapsedMilliseconds);
}
/// <summary>
/// Release phase of MainDom
/// </summary>
/// <remarks>
/// This will execute on a threadpool thread
/// </remarks>
private void MainDomRelease()
{
    _logger.LogDebug("Releasing from MainDom...");

    // Serialize against other store operations while detaching the local DBs.
    lock (_storesLock)
    {
        _logger.LogDebug("Releasing content store...");
        _contentStore?.ReleaseLocalDb(); // null check because we could shut down before being assigned
        _localContentDb = null;

        _logger.LogDebug("Releasing media store...");
        _mediaStore?.ReleaseLocalDb(); // null check because we could shut down before being assigned
        _localMediaDb = null;

        _logger.LogInformation("Released from MainDom");
    }
}
// Spins up one writer thread per processor, waits until at least 1000 records
// exist, optionally snapshots the data file into 'copy', then hard-aborts the
// workers and closes the underlying storage WITHOUT disposing the tree --
// deliberately simulating a crash. Returns the record count observed when the
// snapshot was taken.
int StartAndAbortWriters(BPlusTreeOptions <KeyInfo, DataValue> options, TempFile copy)
{
    RecordsCreated = 0;
    int minRecordCreated;
    BPlusTree <KeyInfo, DataValue> dictionary = new BPlusTree <KeyInfo, DataValue>(options);
    try
    {
        using (WorkQueue work = new WorkQueue(Environment.ProcessorCount))
        {
            Exception lastError = null;
            work.OnError += delegate(object o, ErrorEventArgs e) { lastError = e.GetException(); };
            Thread.Sleep(1);
            for (int i = 0; i < Environment.ProcessorCount; i++)
            {
                work.Enqueue(new ThreadedTest(dictionary, 10000000).Run);
            }

            // Busy-wait until the writers have produced a minimum workload.
            while (RecordsCreated < 1000)
            {
                Thread.Sleep(1);
            }
            // Interlocked read of the shared counter (CompareExchange(x, 0, 0) is a no-op read).
            minRecordCreated = Interlocked.CompareExchange(ref RecordsCreated, 0, 0);
            if (copy != null)
            {
                File.Copy(options.FileName, copy.TempPath); //just grab a copy any old time.
            }
            work.Complete(false, 0); //hard-abort all threads
            //if(lastError != null)
            //    Assert.AreEqual(typeof(InvalidDataException), lastError.GetType());
        }

        // force the file to close without disposing the btree
        // (reflection digs out the private '_storage' member and disposes it directly)
        IDisposable tmp = (IDisposable) new PropertyValue(dictionary, "_storage").Value;
        tmp.Dispose();
    }
    catch
    {
        // On any failure dispose the tree normally before re-throwing.
        dictionary.Dispose();
        throw;
    }
    return(minRecordCreated);
}
// End-to-end recovery test: abort concurrent writers mid-stream, then verify
// RecoverFile() restores both the aborted primary file and a mid-write
// snapshot copy, and that the recovered tree is fully enumerable.
public void TestAbortWritersAndRecover()
{
    BPlusTree<KeyInfo, DataValue>.Options options = new BPlusTree<KeyInfo, DataValue>.Options(
        new KeyInfoSerializer(), new DataValueSerializer(), new KeyInfoComparer());
    options.CalcBTreeOrder(32, 300);
    options.FileName = TempFile.TempPath;
    options.CreateFile = CreatePolicy.Always;

    using (TempFile copy = new TempFile())
    {
        copy.Delete();
        // Run writers, hard-abort them, and grab a mid-write snapshot in 'copy'.
        int minRecordCreated = StartAndAbortWriters(options, copy);

        using (TempFile.Attach(copy.TempPath + ".recovered")) //used to create the new copy
        using (TempFile.Attach(copy.TempPath + ".deleted"))   //renamed existing file
        {
            options.CreateFile = CreatePolicy.Never;

            // Recovery of the aborted primary must retain every counted record.
            int recoveredRecords = BPlusTree<KeyInfo, DataValue>.RecoverFile(options);
            if (recoveredRecords < RecordsCreated)
            {
                // FIX: corrected "recieved" -> "received" in the failure message.
                Assert.Fail("Unable to recover records, received ({0} of {1}).", recoveredRecords, RecordsCreated);
            }

            // The snapshot must recover at least the records complete at copy time.
            options.FileName = copy.TempPath;
            recoveredRecords = BPlusTree<KeyInfo, DataValue>.RecoverFile(options);
            Assert.IsTrue(recoveredRecords >= minRecordCreated,
                          "Expected at least " + minRecordCreated + " found " + recoveredRecords);

            // The recovered tree must be internally consistent and fully enumerable.
            using (BPlusTree<KeyInfo, DataValue> dictionary = new BPlusTree<KeyInfo, DataValue>(options))
            {
                dictionary.EnableCount();
                Assert.AreEqual(recoveredRecords, dictionary.Count);
                foreach (KeyValuePair<KeyInfo, DataValue> kv in dictionary)
                {
                    Assert.AreEqual(kv.Key.UID, kv.Value.Key.UID);
                    dictionary.Remove(kv.Key);
                }
                Assert.AreEqual(0, dictionary.Count);
            }
        }
    }
}
public BPlusTree <int, string> Create()
{
    // Lazily allocate the backing file on first use.
    TempFile = TempFile ?? new TempFile();

    var tree = new BPlusTree<int, string>(
        new BPlusTree<int, string>.Options(PrimitiveSerializer.Instance, PrimitiveSerializer.Instance)
        {
            BTreeOrder = 16,
            LockingFactory = new IgnoreLockFactory(),
            FileName = TempFile.TempPath,
            CreateFile = CreatePolicy.Always,
        });
    tree.EnableCount();
    return tree;
}
// Reads the node's common header block to capture the current version number
// and the versions B+ tree root, then reports startup data for the operation.
public unsafe void Prepare(IReadService readService, out OperationStartupData data)
{
    Block block = readService.Read(BlockType.NodeHeaderBlock, commonAddress);

    // Pin the block buffer so the raw header struct can be overlaid on it.
    fixed(byte *p = block.Data)
    {
        NodeCommonHeader *header = (NodeCommonHeader *)p;
        version = header->CurrentVersionNumber;
        versionTree = new BPlusTree(header->VersionsBTree);
    }

    // We estimate how much we need.
    uint allocations = 15; //< Approximate estimate.
    data = new OperationStartupData(commonAddress, allocations, true);
}
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion

/// <summary>
///A test for Add
///</summary>
public void AddTestHelper <T>()
{
    // Build a small tree of degree 2 and verify Contains() can locate a value.
    // NOTE(review): the generic parameter T is never used in the body — confirm
    // whether it exists only to drive a generated [TestMethod] wrapper.
    int degree = 2;
    BPlusTree <double> target = new BPlusTree <double>(degree);
    double data = 6;
    target.Add(1);
    target.Add(2);
    target.Add(3);
    target.Add(4);
    target.Add(5);
    target.Add(6);
    target.Add(data); // inserts 6 a second time (a duplicate of the prior Add)
    SearchResult actual = target.Contains(data);
    // A non-null search path indicates the value was found.
    Assert.IsNotNull(actual.SearchPath);
}
public void TestMillionRecords()
{
    const int count = 1000000;
    var rng = new Random();

    // Build the expected key -> value map with random values.
    var expected = new Dictionary<int, int>();
    for (int i = 0; i < count; i++)
        expected[i] = rng.Next(1000000);

    var nodeStorage = new BPlusTreeNodeMemoryStorage<ComparableKeyOf<String>>(250);
    var tree = new BPlusTree<ComparableKeyOf<String>, ValueOf<String>>(nodeStorage, new MemoryValueStorage<ValueOf<String>>());

    // Load every pair; keys and values are stored as invariant strings.
    foreach (var pair in expected)
        tree.Set(pair.Key.ToString(CultureInfo.InvariantCulture), pair.Value.ToString(CultureInfo.InvariantCulture));

    // Randomly delete roughly half the keys, remembering which were removed.
    var removed = new Dictionary<int, int>();
    foreach (var pair in expected)
    {
        if (rng.NextDouble() > 0.5)
        {
            removed.Add(pair.Key, pair.Value);
            tree.Remove(pair.Key.ToString(CultureInfo.InvariantCulture));
        }
    }

    // Deleted keys must be gone; surviving keys must retain their values.
    foreach (var pair in expected)
    {
        string value = tree.Get(pair.Key.ToString(CultureInfo.InvariantCulture));
        if (removed.ContainsKey(pair.Key))
            Assert.IsNull(value);
        else
            Assert.AreEqual(pair.Value.ToString(CultureInfo.InvariantCulture), value);
    }
}
public void TestRandomOperations()
{
    int capacity = 10;
    var nodeStorage = new BPlusTreeNodeMemoryStorage<ComparableKeyOf<Int32>>(capacity);
    var tree = new BPlusTree<ComparableKeyOf<Int32>, ValueOf<Int32>>(nodeStorage, new MemoryValueStorage<ValueOf<Int32>>());

    int keyCount = capacity * capacity * capacity;
    int operationCount = keyCount * keyCount;

    // present[k] tracks whether key k is currently expected in the tree.
    var present = new bool[keyCount];
    var rng = new Random();
    for (int op = 0; op < operationCount; op++)
    {
        int key = rng.Next(keyCount);
        // Toggle: remove the key if it is present, otherwise insert it.
        if (present[key])
            tree.Remove(key);
        else
            tree.Set(key, key);
        present[key] = !present[key];
    }

    // The tree must agree with the tracking array for every key.
    for (int key = 0; key < keyCount; key++)
    {
        var value = tree.Get(key);
        if (present[key])
            Assert.AreEqual(key, value.Value);
        else
            Assert.IsNull(value);
    }

    Assert.AreEqual(present.Count(f => f), tree.Count());

    // Finally, the tree's internal structure must be self-consistent.
    string message;
    Assert.IsTrue(tree.CheckConsistency(out message));
}
// Replaces an existing author/document link: validates that both ids are
// numeric strings, deletes the old record, then appends the new record to the
// data file and indexes its byte offset in the B+ tree.
public void Edit(Author_DocumentEntity a_d)
{
    // Author_id must be a non-null, all-digit string.
    if (a_d.Author_id == null)
    {
        throw new ArgumentNullException();
    }
    foreach (var item in a_d.Author_id)
    {
        if (!Char.IsNumber(item))
        {
            throw new FormatException();
        }
    }

    // Document_id must be a non-null, all-digit string.
    if (a_d.Document_id == null)
    {
        throw new ArgumentNullException();
    }
    foreach (var item in a_d.Document_id)
    {
        if (!Char.IsNumber(item))
        {
            throw new FormatException();
        }
    }

    // Remove the previous version of this record before re-adding it.
    Delete(a_d.A_D_id);
    options.CalcBTreeOrder(4, 4);
    options.CreateFile = CreatePolicy.Never;
    // NOTE(review): the absolute path is machine-specific, and Path.GetFileName()
    // strips the directory, leaving "TreeAuthor_Document" resolved against the
    // current working directory — confirm this is intended.
    options.FileName = Path.GetFileName("C:/Users/Дмитрий/Desktop/DMDProject/DMDProject/TreeAuthor_Document");
    using (FileStream fs = new FileStream("C:/Users/Дмитрий/Desktop/DMDProject/DMDProject/Author_Document", FileMode.OpenOrCreate))
    {
        // Index record id -> byte offset of the record about to be appended.
        using (var tree = new BPlusTree <int, int>(options))
        {
            tree.Add(Convert.ToInt32(a_d.A_D_id), (int)fs.Length);
        }

        // Append the record fields to the end of the data file as three Int32s.
        fs.Seek((int)fs.Length, 0);
        using (BinaryWriter bw = new BinaryWriter(fs))
        {
            bw.Write(Convert.ToInt32(a_d.A_D_id));
            bw.Write(Convert.ToInt32(a_d.Author_id));
            bw.Write(Convert.ToInt32(a_d.Document_id));
        }
    }
}
public void LastIndexOfTest()
{
    // Insert five copies of each key 0..9 in ascending key order.
    var tree = new BPlusTree<int>(3);
    for (int key = 0; key < 10; ++key)
    {
        for (int dup = 0; dup < 5; ++dup)
        {
            tree.Add(key);
        }
    }

    // The last occurrence of key k must sit at index k*5 + 4.
    for (int key = 0; key < 10; ++key)
    {
        Assert.AreEqual(tree.LastIndexOf(key), key * 5 + 4);
    }

    // A key that was never inserted reports -1.
    Assert.AreEqual(tree.LastIndexOf(10), -1);
}
public Collector(string directory, IxInfo ix, IScoringScheme scorer)
{
    _directory = directory;
    _ix = ix;
    _scorer = scorer;

    var initTimer = Time();

    // Open the posting database read-only from "<name>-pos.db" in the index directory.
    var dbOptions = new BPlusTree<Term, DocumentPosting[]>.OptionsV2(
        new TermSerializer(),
        new ArraySerializer<DocumentPosting>(new PostingSerializer()),
        new TermComparer());
    dbOptions.FileName = Path.Combine(directory, string.Format("{0}-{1}.{2}", _ix.Name, "pos", "db"));
    dbOptions.ReadOnly = true;

    _postingDb = new BPlusTree<Term, DocumentPosting[]>(dbOptions);

    Log.DebugFormat("init collector in {0}", initTimer.Elapsed);
}
public void Insert_IncreasingOrder()
{
    // Repeat the scenario across a sweep of branching factors.
    for (int maxDegree = 3; maxDegree <= 101; maxDegree++)
    {
        var bPlusTree = new BPlusTree<long, long>(maxDegree);
        var itemsToInsert = GetIncreasingCollection(NUMBER_OF_INSERTION);

        // Insert each item using itself as both key and value.
        foreach (var item in itemsToInsert)
        {
            long key = (long)item;
            long value = (long)item;
            bPlusTree.Insert(key, value);
        }

        Assert.IsTrue(Helpers.CheckNodes(bPlusTree.Root));
        Assert.AreEqual(NUMBER_OF_INSERTION, bPlusTree.Count);
        // Leaf-level traversal must yield the keys in the inserted order.
        CollectionAssert.AreEqual(itemsToInsert, DumpKeysOnLeafNodes(bPlusTree));
    }
}
public void TestAtomicAdd()
{
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();

        // The value factory bumps counter[0] once per invocation.
        int[] counter = new int[] { -1 };
        for (int i = 0; i < 100; i++)
        {
            Assert.IsTrue(data.TryAdd(i, k => (++counter[0]).ToString()));
        }
        Assert.AreEqual(100, data.Count);
        Assert.AreEqual(100, counter[0] + 1);

        //Inserts of existing keys will not call method
        Assert.IsFalse(data.TryAdd(50, k => { throw new InvalidOperationException(); }));
        Assert.AreEqual(100, data.Count);
    }
}
public List <KeywordMatch <TKey, TValue> > Update(TKey keyword, KeywordMatchMode matchMode, TValue newValue, UpdateMode updateMode)
{
    var matches = new List<KeywordMatch<TKey, TValue>>();
    try
    {
        BPlusTree<TKey, TValue>.LogInfo("Begin updating keyword '" + keyword.ToString() + "'");

        // Collect every node/index pair holding the keyword, then rewrite
        // each matched value in place and persist the owning node.
        this.performSearch(keyword, matchMode, false, false, matches);
        foreach (KeywordMatch<TKey, TValue> match in matches)
        {
            match.Node.Values[match.Index].Update(newValue, updateMode);
            match.Node.Write();
        }
    }
    finally
    {
        // Paired end-marker is logged even when search or update throws.
        BPlusTree<TKey, TValue>.LogInfo("End updating keyword '" + keyword.ToString() + "'");
    }
    return matches;
}
// Fault-injection test: while background workers hammer the tree, the main
// thread alternates adds and removes whose callbacks deliberately throw.
// Each ExpectedException must leave the tree consistent for the workers.
public void TestErrorsOnInsertAndDelete()
{
    const int CountPerThread = 100;

    BPlusTree <KeyInfo, DataValue> .OptionsV2 options = new BPlusTree <KeyInfo, DataValue> .OptionsV2(
        new KeyInfoSerializer(), new DataValueSerializer(), new KeyInfoComparer());
    options.CalcBTreeOrder(32, 300);
    options.FileName = TempFile.TempPath;
    options.CreateFile = CreatePolicy.Always;

    using (BPlusTree <KeyInfo, DataValue> dictionary = new BPlusTree <KeyInfo, DataValue>(options))
    using (WorkQueue work = new WorkQueue(Environment.ProcessorCount))
    {
        Exception lastError = null;
        work.OnError += delegate(object o, ErrorEventArgs e) { lastError = e.GetException(); };

        // One background writer per processor.
        for (int i = 0; i < Environment.ProcessorCount; i++)
        {
            work.Enqueue(new ThreadedTest(dictionary, CountPerThread).Run);
        }

        // Even iterations: TryAdd whose value factory throws.
        // Odd iterations: TryRemove whose predicate throws.
        for (int i = 0; i < CountPerThread; i++)
        {
            if (i % 2 == 0)
            {
                try
                {
                    dictionary.TryAdd(new KeyInfo(Guid.NewGuid(), i), k => { throw new ExpectedException(); });
                }
                catch { }
            }
            else
            {
                try
                {
                    dictionary.TryRemove(dictionary.First().Key, (k, v) => { throw new ExpectedException(); });
                }
                catch { }
            }
        }

        // Workers must finish cleanly with no surfaced exceptions.
        Assert.IsTrue(work.Complete(true, 60000));
        Assert.IsNull(lastError, "Exception raised in worker: {0}", lastError);
    }
}
// Benchmarks heavily-overlapping AddOrUpdate traffic (keys restricted to
// [10, 12)) and writes per-sample throughput figures to a report file.
public static void RandomOverlaps(string outputPath, string TestName)
{
    int regionCount = 200000;
    int sampleCount = 300;
    Random rnd = new Random();
    Stopwatch stopWatch = new Stopwatch();

    if (!Directory.Exists(outputPath + Path.DirectorySeparatorChar))
    {
        Directory.CreateDirectory(outputPath + Path.DirectorySeparatorChar);
    }

    // FIX: the StreamWriter was never disposed; 'using' guarantees it is
    // flushed and closed even if an exception escapes the benchmark loop.
    using (StreamWriter writer = new StreamWriter(outputPath + Path.DirectorySeparatorChar + "speed" + TestName + ".txt"))
    {
        writer.WriteLine("Di3 indexing speed test: " + TestName);
        writer.WriteLine("Speed(interval/sec)\tET\tET(ms)");

        BPlusTree<int, int>.OptionsV2 options = new BPlusTree<int, int>.OptionsV2(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32);
        options.CreateFile = CreatePolicy.Always;
        options.FileName = outputPath + Path.DirectorySeparatorChar + "speed" + TestName;

        LargeInsertionAddUpdateValue update = new LargeInsertionAddUpdateValue();
        using (var tree = new BPlusTree<int, int>(options))
        {
            for (int sample = 0; sample < sampleCount; sample++)
            {
                Console.WriteLine("processing sample : {0:N0}", sample);
                stopWatch.Restart();
                // Keys drawn from [10, 12) force constant overlap/update traffic.
                for (int region = 0; region < regionCount; region++)
                {
                    update.Value = rnd.Next(0, Int32.MaxValue);
                    tree.AddOrUpdate(rnd.Next(10, 12), ref update);
                }
                stopWatch.Stop();

                // FIX: corrected "Writting" -> "Writing" in the console output.
                Console.WriteLine(".::. Writing Speed : {0} intervals\\sec", Math.Round(regionCount / stopWatch.Elapsed.TotalSeconds, 2));
                Console.WriteLine("");
                writer.WriteLine(Math.Round(regionCount / stopWatch.Elapsed.TotalSeconds, 2).ToString() + "\t" + stopWatch.Elapsed.ToString() + "\t" + stopWatch.ElapsedMilliseconds.ToString());
                writer.Flush();
            }
        }
    }
}
/// <summary>
/// Checks whether the tree has B+ Tree properties.
/// </summary>
/// <param name="tree">A B + tree. </param>
/// <param name="expectedTotalKeyCount">The expected number of keys (duplicate and distinct) in the tree. </param>
/// <param name="expectedDistinctKeyCount">The expected number of distinct keys in the tree. </param>
/// <param name="expectedNodeCount">The expected number of nodes in the tree. </param>
public static void HasBPlusTreeProperties(BPlusTree <int, string> tree, int expectedTotalKeyCount, int expectedDistinctKeyCount, int expectedNodeCount)
{
    // First validate the generic B-tree invariants using the B+ node predicate.
    Assert.IsTrue(HasBTreeProperties(tree, expectedTotalKeyCount, expectedDistinctKeyCount, expectedNodeCount, HasBPlusTreeNodeProperties));

    // Then confirm values live only at the leaves: every internal node's
    // key/value array must carry only default values.
    var nodes = new List<BPlusTreeNode<int, string>>();
    DFS<BPlusTreeNode<int, string>, int, string>(tree.Root, nodes);
    foreach (var node in nodes)
    {
        if (node.IsLeaf())
            continue;
        foreach (KeyValuePair<int, string> keyVal in node.GetKeyValues())
        {
            Assert.AreEqual(default(string), keyVal.Value);
        }
    }
}
// Inserts keys ascending, deletes them all descending across separate page-store
// sessions, then verifies the persisted tree is empty after the final reopen.
public void TestInsertAndDeleteInReverseOrder()
{
    const string pageStoreName = "TestInsertAndDeleteInReverseOrder.data";
    var value = new byte[64];
    ulong rootPageId;
    const int keyCount = 20000;

    // Phase 1: insert keys 0..keyCount-1 in ascending order and commit.
    using (var pageStore = TestUtils.CreateEmptyPageStore(pageStoreName))
    {
        var tree = new BPlusTree(0, pageStore);
        for (int i = 0; i < keyCount; i++)
        {
            tree.Insert(0, (ulong)i, value);
        }
        rootPageId = tree.Save(0, null);
        pageStore.Commit(0, null);
    }

    // Phase 2: reopen and delete every key in descending order.
    using (var pageStore = TestUtils.OpenPageStore(pageStoreName, false))
    {
        var tree = new BPlusTree(pageStore, rootPageId);
        for (int i = keyCount - 1; i >= 0; i--)
        {
            try
            {
                tree.Delete(1, (ulong)i, null);
            }
            catch (Exception)
            {
                // Identify the failing key before re-throwing.
                Console.WriteLine("Delete threw exception on key {0}", i);
                throw;
            }
        }
        rootPageId = tree.Save(1, null);
        pageStore.Commit(0, null);
    }

    // Phase 3: the reopened tree must contain no entries at all.
    using (var pageStore = TestUtils.OpenPageStore(pageStoreName, false))
    {
        var tree = new BPlusTree(pageStore, rootPageId);
        // FIX: assert message previously read "Expected and empty tree".
        Assert.AreEqual(0, tree.Scan(null).Count(), "Expected an empty tree after all deletes");
    }
}
public BPlusTree <int, string> Create()
{
    // Discard any backing file left over from a prior Create() call.
    if (_tempFile != null)
    {
        _tempFile.Dispose();
    }
    _tempFile = new TempFile();

    var tree = new BPlusTree<int, string>(
        new BPlusTree<int, string>.OptionsV2(PrimitiveSerializer.Instance, PrimitiveSerializer.Instance)
        {
            CreateFile = CreatePolicy.Always,
            FileName = _tempFile.TempPath,
        }.CalcBTreeOrder(4, 10));
    tree.EnableCount();
    return tree;
}
// Verifies leaf rebalancing: after deleting two adjacent keys, the underflowing
// leaf must borrow from its right sibling without losing any other entries.
public void TestBorrowRight()
{
    using (var pageStore = TestUtils.CreateEmptyPageStore("TestBorrowRight.data"))
    {
        var tree = new BPlusTree(pageStore);
        var buff = new byte[_config.ValueSize];
        var txnId = 0ul;
        // Fill one leaf just past its load factor so it splits into siblings.
        for (int i = 0; i <= _config.LeafLoadFactor; i++)
        {
            tree.Insert(txnId, (ulong)i, BitConverter.GetBytes((ulong)i));
        }
        tree.Delete(txnId, 13ul, null);
        tree.Delete(txnId, 12ul, null); // Should force a borrow from the right node
        // XOR chain asserts that Search fails exactly for keys 12 and 13 and
        // succeeds for every other key.
        for (int i = 0; i <= _config.LeafLoadFactor; i++)
        {
            Assert.IsTrue(i == 12 ^ i == 13 ^ tree.Search((ulong)i, buff, null));
        }
    }
}
// Initializes the snapshot store: caches the injected dependencies and creates
// empty concurrent maps for content nodes, root nodes, content types (by id
// and by case-insensitive alias) and the Guid-to-id cross map, plus the
// generation-tracking state used for snapshotting.
public ContentStore(IPublishedSnapshotAccessor publishedSnapshotAccessor, IVariationContextAccessor variationContextAccessor, ILogger logger, BPlusTree <int, ContentNodeKit> localDb = null)
{
    _publishedSnapshotAccessor = publishedSnapshotAccessor;
    _variationContextAccessor = variationContextAccessor;
    _logger = logger;
    _localDb = localDb; // optional on-disk cache; null disables local persistence

    _contentNodes = new ConcurrentDictionary <int, LinkedNode <ContentNode> >();
    _contentRootNodes = new ConcurrentDictionary <int, LinkedNode <object> >();
    _contentTypesById = new ConcurrentDictionary <int, LinkedNode <PublishedContentType> >();
    _contentTypesByAlias = new ConcurrentDictionary <string, LinkedNode <PublishedContentType> >(StringComparer.InvariantCultureIgnoreCase);
    _xmap = new ConcurrentDictionary <Guid, int>();

    _genRefRefs = new ConcurrentQueue <GenRefRef>();
    _genRefRef = null; // no initial gen exists
    _liveGen = _floorGen = 0;
    _nextGen = false; // first time, must create a snapshot
    _collectAuto = true; // collect automatically by default
}
public void TestInsertInReverseOrder()
{
    using (var pageStore = TestUtils.CreateEmptyPageStore("TestInsertInReverseOrder.data"))
    {
        var tree = new BPlusTree(pageStore);
        var txn = 0ul;

        // Insert keys from 100000 down to 1, each valued with its own bytes.
        for (int key = 100000; key > 0; key--)
        {
            tree.Insert(txn, (ulong)key, BitConverter.GetBytes(key));
        }

        // Every key must be found and must read back its original bytes.
        var valueBuffer = new byte[_config.ValueSize];
        for (int key = 100000; key > 0; key--)
        {
            Assert.IsTrue(tree.Search((ulong)key, valueBuffer, null));
            TestUtils.AssertBuffersEqual(BitConverter.GetBytes(key), valueBuffer);
        }
    }
}
public void SingleSplitRoot()
{
    // The third insert overflows the leaf root and forces the first split.
    var tree = new BPlusTree(5, "5");
    tree.TryAppendElementToTree(3, "3");
    tree.TryAppendElementToTree(10, "10");

    Assert.AreEqual(false, tree.Root.IsLeaf);
    Assert.AreEqual(2, tree.Root.Children.Count);

    // Key distribution after the split: [3] | [5, 10].
    Assert.AreEqual(3, tree.Root.Children[0].KeyValueDictionary.ElementAt(0).Key);
    Assert.AreEqual(5, tree.Root.Children[1].KeyValueDictionary.ElementAt(0).Key);
    Assert.AreEqual(10, tree.Root.Children[1].KeyValueDictionary.ElementAt(1).Key);

    // Both leaves point back to the new root.
    Assert.AreEqual(tree.Root, tree.Root.Children[0].Parent);
    Assert.AreEqual(tree.Root, tree.Root.Children[1].Parent);

    // The leaf sibling chain is wired in both directions with null ends.
    Assert.AreEqual(null, tree.Root.Children[0].PreviousLeaf);
    Assert.AreEqual(tree.Root.Children[1], tree.Root.Children[0].NextLeaf);
    Assert.AreEqual(null, tree.Root.Children[1].NextLeaf);
    Assert.AreEqual(tree.Root.Children[0], tree.Root.Children[1].PreviousLeaf);
}
public void TestBulkInsertSorted()
{
    // One pre-sorted set of 1000 pairs; 'test' mirrors the expected contents.
    var test = new Dictionary<int, string>();
    IEnumerable<KeyValuePair<int, string>>[] sets =
        new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(1, 1000, test)).ToArray();

    using (var tree = new BPlusTree<int, string>(Options))
    {
        var bulkOptions = new BulkInsertOptions
        {
            DuplicateHandling = DuplicateHandling.LastValueWins,
            InputIsSorted = true
        };
        tree.BulkInsert(new OrderedKeyValuePairs<int, string>(sets[0]), bulkOptions);
        VerifyDictionary(test, tree);
    }
}
// Captures the two Di4 resolution trees, the [left, right] block range and the
// min/max accumulation bounds for this pass; regions start as Candidate.
// NOTE(review): presumably part of the cover/summit computation — confirm
// against the Di4 documentation.
internal CoverSummit_mid(
    object lockOnMe,
    BPlusTree <C, B> di4_1R,
    BPlusTree <BlockKey <C>, BlockValue> di4_2R,
    IOutput <C, I, M, O> outputStrategy,
    BlockKey <C> left,
    BlockKey <C> right,
    int minAcc,
    int maxAcc)
{
    _di4_1R = di4_1R;
    _di4_2R = di4_2R;
    _left = left;
    _right = right;
    _minAcc = minAcc;
    _maxAcc = maxAcc;
    _regionType = RegionType.Candidate;
    // The stack serializes decomposition output through the shared lock object.
    _decompositionStack = new DecompositionStack <C, I, M, O>(outputStrategy, lockOnMe);
}
public void TestCloneWithCallLockV1()
{
    var options = new BPlusTree<int, int>.Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
    {
        CreateFile = CreatePolicy.IfNeeded,
        BTreeOrder = 4
    };

    // A clone is a distinct options object but shares the call-level lock.
    var copy = options.Clone();
    Assert.IsFalse(Object.ReferenceEquals(options, copy));
    Assert.IsTrue(Object.ReferenceEquals(options.CallLevelLock, copy.CallLevelLock));

    // An explicitly assigned lock is likewise carried over by reference.
    options.CallLevelLock = new SimpleReadWriteLocking();
    copy = options.Clone();
    Assert.IsFalse(Object.ReferenceEquals(options, copy));
    Assert.IsTrue(Object.ReferenceEquals(options.CallLevelLock, copy.CallLevelLock));
}
public void TestDeleteFromLeafRoot()
{
    using (var pageStore = TestUtils.CreateEmptyPageStore("TestDeleteFromLeafRoot.data"))
    {
        var tree = new BPlusTree(pageStore);
        var txn = 0ul;
        var valueBuffer = new byte[_config.ValueSize];

        // Three entries fit inside the root leaf; delete the middle one.
        tree.Insert(txn, 1ul, TestUtils.StringToByteArray("one"));
        tree.Insert(txn, 2ul, TestUtils.StringToByteArray("two"));
        tree.Insert(txn, 3ul, TestUtils.StringToByteArray("three"));
        tree.Delete(txn, 2ul, null);

        // Neighbours survive with their payloads; the deleted key is gone.
        Assert.IsTrue(tree.Search(1ul, valueBuffer, null));
        TestUtils.AssertBuffersEqual(TestUtils.StringToByteArray("one"), valueBuffer);
        Assert.IsTrue(tree.Search(3ul, valueBuffer, null));
        TestUtils.AssertBuffersEqual(TestUtils.StringToByteArray("three"), valueBuffer);
        Assert.IsFalse(tree.Search(2ul, valueBuffer, null));
    }
}
// Exercises AddRangeSorted over a key that was already added directly.
public static void Test_AllowUpdates()
{
    var options = new BPlusTree<double, string>.OptionsV2(PrimitiveSerializer.Double, PrimitiveSerializer.String);
    options.CalcBTreeOrder(16, 24);
    options.FileBlockSize = 8192;
    options.CreateFile = CreatePolicy.Always;
    // FIX: was the hard-coded path "I:\\test.tmp", which fails on any machine
    // without an I: drive; use the system temp directory instead.
    options.FileName = System.IO.Path.Combine(System.IO.Path.GetTempPath(), "test.tmp");

    var sortedContent = new SortedDictionary<double, string>();
    sortedContent.Add(10.0, "Demah");

    using (var tree = new BPlusTree<double, string>(options))
    {
        // Key 10.0 is added first, then supplied again via the sorted range;
        // the boolean argument presumably permits the update — confirm against
        // the AddRangeSorted signature.
        tree.Add(10.0, "Hamed");
        tree.AddRangeSorted(sortedContent, true);
    }
}
// Verifies the tree's finalizer releases its file handle: a tree is created
// and abandoned inside a delegate (so no live root keeps it reachable), the
// GC is forced, and then deleting the backing file must succeed.
public void TestDeleteAfterGarbageCollection()
{
    System.Threading.ThreadStart fn = delegate()
    {
        BPlusTree <int, string> tree = new BPlusTree <int, string>(Options);
        Assert.IsTrue(tree.TryAdd(1, "hi"));
        tree = null; // drop the only reference before the delegate returns
    };
    fn();

    //Allow the GC to collect the BTree
    System.Threading.Thread.Sleep(10);
    GC.GetTotalMemory(true); // forces a full collection
    GC.WaitForPendingFinalizers();

    //Make sure the file has been released
    TempFile.Delete();
}
void DeleteStuff(BPlusTree<Guid, TestInfo> tree)
{
    // Keep trimming until the shared stop event is signalled.
    while (!mreStop.WaitOne(0, false))
    {
        if (tree.Count > 1000)
        {
            // Remove enough entries to bring the tree back down to ~1000.
            int excess = tree.Count - 1000;
            foreach (Guid key in tree.Keys)
            {
                if (!tree.Remove(key))
                    throw new ApplicationException();
                if (--excess <= 0)
                    break;
            }
        }
        else
        {
            Thread.Sleep(1);
        }
    }
}
// Stress test: runs a mixed workload (one of each mutator plus five readers)
// against a single file-backed tree, lets it churn, then stops all workers and
// propagates any exception they raised.
public void TestConcurrency()
{
    mreStop.Reset();
    using(TempFile temp = new TempFile())
    {
        BPlusTree<Guid, TestInfo>.OptionsV2 options = new BPlusTree<Guid, TestInfo>.OptionsV2(
            PrimitiveSerializer.Guid, new TestInfoSerializer());
        options.CalcBTreeOrder(16, 24);
        options.CreateFile = CreatePolicy.Always;
        options.FileName = temp.TempPath;
        using (BPlusTree<Guid, TestInfo> tree = new BPlusTree<Guid, TestInfo>(options))
        {
            tree.EnableCount();
            var actions = new List<IAsyncResult>();
            // Mixed workload: each mutator once, FetchStuff five times over.
            var tests = new Action<BPlusTree<Guid, TestInfo>>[]
                {
                    DeleteStuff, UpdateStuff, AddStuff, AddRanges, BulkyInserts,
                    FetchStuff, FetchStuff, FetchStuff, FetchStuff, FetchStuff
                };
            foreach (var t in tests)
                actions.Add(t.BeginInvoke(tree, null, null));

            const int waitIterations = 8; // wait for n seconds
            int timesWaited = 0;
            do
            {
                Trace.TraceInformation("Dictionary.Count = {0}", tree.Count);
                Thread.Sleep(1000);
                timesWaited++;
            }
            // NOTE(review): without a debugger attached this loop exits after a
            // single one-second iteration; verify '&& Debugger.IsAttached' is
            // intended rather than only bounding the wait while debugging.
            while (timesWaited<waitIterations && Debugger.IsAttached);

            // Signal all workers to stop, then surface any worker exceptions.
            mreStop.Set();
            for (int i = 0; i < actions.Count; i++)
            {
                tests[i].EndInvoke(actions[i]);
            }
            Trace.TraceInformation("Dictionary.Count = {0}", tree.Count);
        }
    }
}
public void TestCloneWithCallLockV1()
{
    var options = new BPlusTree <int, int> .Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
    {
        CreateFile = CreatePolicy.IfNeeded,
        BTreeOrder = 4
    };

    // Cloning yields a new options instance whose CallLevelLock is shared.
    var copy = options.Clone();
    Assert.IsFalse(Object.ReferenceEquals(options, copy));
    Assert.IsTrue(Object.ReferenceEquals(options.CallLevelLock, copy.CallLevelLock));

    //If we get/set the lock prior to clone we will have the same lock instance.
    options.CallLevelLock = new SimpleReadWriteLocking();
    copy = options.Clone();
    Assert.IsFalse(Object.ReferenceEquals(options, copy));
    Assert.IsTrue(Object.ReferenceEquals(options.CallLevelLock, copy.CallLevelLock));
}
// Verifies write-to-temporary-copy semantics: while a tree is open, writes go
// to a backup copy and the target file reflects only the last committed state;
// CommitChanges() (or disposing the tree) publishes changes to the target.
public void TestWriteToTemporaryCopy()
{
    Dictionary<Guid, TestInfo> first, data = new Dictionary<Guid, TestInfo>();
    using(TempFile temp = new TempFile())
    {
        temp.Delete();
        var options = GetOptions(temp);
        options.TransactionLogFileName = Path.ChangeExtension(options.FileName, ".tlog");

        using (var tree = new BPlusTree<Guid, TestInfo>(options))
        {
            Insert(tree, data, 1, 100, TimeSpan.MaxValue);
            TestInfo.AssertEquals(data, tree);
            // While open, the target file has not been created yet.
            Assert.IsFalse(temp.Exists);
        }
        // All data commits to output file
        Assert.IsTrue(temp.Exists);
        TestInfo.AssertEquals(data, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
        first = new Dictionary<Guid, TestInfo>(data);

        using (var tree = new BPlusTree<Guid, TestInfo>(options))
        {
            Insert(tree, data, 1, 100, TimeSpan.MaxValue);
            //We are writing to a backup, the original file still contains 100 items:
            TestInfo.AssertEquals(first, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
            //Commit the changes and the original file will now contain our changes:
            tree.CommitChanges();
            TestInfo.AssertEquals(data, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
            //Add a few more records...
            Insert(tree, data, 1, 100, TimeSpan.MaxValue);
        }
        //Dispose of the tree will commit changes...
        TestInfo.AssertEquals(data, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
    }
}
// Exercises a full CRUD cycle over the arithmetic sequence [start, stop)
// stepping by incr — insert, read, update, re-read, enumerate, delete — timing
// and tracing each phase under 'name'. Any mismatch aborts via ApplicationException.
void SequencedTest(int start, int incr, int stop, string name)
{
    int count = Math.Abs(start - stop)/Math.Abs(incr);
    const string myTestValue1 = "T1", myTestValue2 = "t2";
    string test;

    using (BPlusTree<int, string> data = new BPlusTree<int, string>(Options))
    {
        Stopwatch time = new Stopwatch();
        time.Start();

        //large order-forward
        for (int i = start; i != stop; i += incr)
            if (!data.TryAdd(i, myTestValue1)) throw new ApplicationException();
        Trace.TraceInformation("{0} insert {1} in {2}", name, count, time.ElapsedMilliseconds);

        // Every inserted key must read back the first value.
        time.Reset();
        time.Start();
        for (int i = start; i != stop; i += incr)
            if (!data.TryGetValue(i, out test) || test != myTestValue1) throw new ApplicationException();
        Trace.TraceInformation("{0} seek {1} in {2}", name, count, time.ElapsedMilliseconds);

        // Update every key in place.
        time.Reset();
        time.Start();
        for (int i = start; i != stop; i += incr)
            if (!data.TryUpdate(i, myTestValue2)) throw new ApplicationException();
        Trace.TraceInformation("{0} modify {1} in {2}", name, count, time.ElapsedMilliseconds);

        // Re-read to confirm every update took effect.
        time.Reset();
        time.Start();
        for (int i = start; i != stop; i += incr)
            if (!data.TryGetValue(i, out test) || test != myTestValue2) throw new ApplicationException();
        Trace.TraceInformation("{0} seek#2 {1} in {2}", name, count, time.ElapsedMilliseconds);

        // Full enumeration must visit exactly 'count' entries, all updated.
        time.Reset();
        time.Start();
        int tmpCount = 0;
        foreach (KeyValuePair<int, string> tmp in data)
            if (tmp.Value != myTestValue2) throw new ApplicationException();
            else tmpCount++;
        if (tmpCount != count) throw new ApplicationException();
        Trace.TraceInformation("{0} foreach {1} in {2}", name, count, time.ElapsedMilliseconds);

        // Delete everything, then confirm no key remains readable.
        time.Reset();
        time.Start();
        for (int i = start; i != stop; i += incr)
            if (!data.Remove(i)) throw new ApplicationException();
        Trace.TraceInformation("{0} delete {1} in {2}", name, count, time.ElapsedMilliseconds);

        for (int i = start; i != stop; i += incr)
            if (data.TryGetValue(i, out test)) throw new ApplicationException();
    }
}
/// <summary>
/// Clear the cache by replacing it with a fresh, count-enabled tree.
/// Thread-safe: performed under <c>CacheLock</c>.
/// </summary>
public void Clear()
{
    lock (CacheLock)
    {
        // Swapping in a new instance discards all cached entries at once.
        Cache = new BPlusTree<string, Tuple<object, DateTime, DateTime>>();
        Cache.EnableCount();
        // (Removed a redundant trailing 'return;' — it was the last statement
        // of a void method and had no effect.)
    }
}
/// <summary>
/// Verifies crash recovery against an existing store: commits an initial 100
/// records, then simulates a crash (storage disposed via reflection without a
/// commit) after writing more records plus extra log-only entries, and asserts
/// that reopening the tree replays the transaction log to recover everything.
/// </summary>
/// <param name="options">Tree options; must have a TransactionLog configured.</param>
/// <param name="count">Records inserted per thread during the "crashed" session.</param>
/// <param name="added">Extra records appended directly to the log.</param>
void TestRecoveryOnExisting(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> dataFirst, data = new Dictionary<Guid, TestInfo>();
    try
    {
        temp.Delete();
        Assert.IsNotNull(options.TransactionLog);

        // Session 1: cleanly committed baseline of 100 records.
        using (tree = new BPlusTree<Guid, TestInfo>(options))
        {
            Insert(tree, data, 1, 100, TimeSpan.MaxValue);
            TestInfo.AssertEquals(data, tree);
            Assert.IsFalse(temp.Exists);
        }
        tree = null;
        Assert.IsTrue(File.Exists(options.TransactionLogFileName));

        // All data commits to output file
        Assert.IsTrue(temp.Exists);
        TestInfo.AssertEquals(data, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
        dataFirst = new Dictionary<Guid, TestInfo>(data);
        DateTime modified = temp.Info.LastWriteTimeUtc;

        // Session 2: simulate a crash — dispose the private "_storage" member
        // (via reflection) so nothing is committed to the data file, while the
        // transaction log still records all the inserts.
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            using ((IDisposable) new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;

        //Still only contains original data
        Assert.AreEqual(modified, temp.Info.LastWriteTimeUtc);
        TestInfo.AssertEquals(dataFirst, BPlusTree<Guid, TestInfo>.EnumerateFile(options));

        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null) tree.Dispose();
    }
}
/// <summary>
/// Verifies crash recovery when the data file was never created: inserts are
/// recorded only in the transaction log (storage disposed via reflection with
/// no commit), then reopening the tree replays the log and materializes the file.
/// </summary>
/// <param name="options">Tree options; must have a TransactionLog configured.</param>
/// <param name="count">Records inserted per thread before the simulated crash.</param>
/// <param name="added">Extra records appended directly to the log.</param>
// NOTE(review): an identically-signed TestRecoveryOnNew appears later in this
// file — presumably they belong to different test classes; confirm.
void TestRecoveryOnNew(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> data = new Dictionary<Guid, TestInfo>();
    try
    {
        Assert.IsNotNull(options.TransactionLog);
        temp.Delete();

        // Simulate a crash: dispose the private "_storage" member (reflection)
        // so the data file is never written, leaving only the log.
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            using ((IDisposable)new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;

        //No file... yet...
        Assert.IsFalse(File.Exists(options.FileName));

        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
        // Recovery must have created and committed the data file.
        Assert.IsTrue(File.Exists(options.FileName));
    }
    finally
    {
        temp.Dispose();
        if (tree != null) tree.Dispose();
    }
}
/// <summary>
/// Worker loop: bulk-inserts batches of 100 generated records until the
/// shared stop event is signaled, throttling between batches.
/// </summary>
void BulkyInserts(BPlusTree<Guid, TestInfo> tree)
{
    while (mreStop.WaitOne(0, false) == false)
    {
        var batch = CreateData(100);
        tree.BulkInsert(batch);
        AddIdle(tree);
        Thread.Sleep(100);
    }
}
/// <summary>
/// Worker loop: adds 100 generated records one at a time per iteration until
/// the shared stop event is signaled.
/// </summary>
void AddStuff(BPlusTree<Guid, TestInfo> tree)
{
    while (mreStop.WaitOne(0, false) == false)
    {
        var batch = CreateData(100);
        foreach (var pair in batch)
        {
            tree.Add(pair.Key, pair.Value);
        }
        AddIdle(tree);
    }
}
/// <summary>
/// Verifies that a large transaction log can be saved, restored over a
/// rolled-back store, and fully replayed: after copying the saved log back,
/// reopening the tree recovers all uncommitted inserts and updates.
/// </summary>
public void TestRestoreLargeLog()
{
    using (TempFile savelog = new TempFile())
    using (TempFile temp = new TempFile())
    {
        var options = GetOptions(temp);
        options.FileBlockSize = 512;
        options.StoragePerformance = StoragePerformance.Fastest;
        options.CalcBTreeOrder(Marshal.SizeOf(typeof(Guid)), Marshal.SizeOf(typeof(TestInfo)));
        options.TransactionLog = new TransactionLog<Guid, TestInfo>(
            new TransactionLogOptions<Guid, TestInfo>(
                options.TransactionLogFileName,
                options.KeySerializer,
                options.ValueSerializer
                )
            );

        //Now recover...
        Dictionary<Guid, TestInfo> first = new Dictionary<Guid, TestInfo>();
        Dictionary<Guid, TestInfo> sample;

        using (var tree = new BPlusTree<Guid, TestInfo>(options))
        {
            tree.EnableCount();
            // Commit a baseline of 100 records.
            Insert(tree, first, 1, 100, TimeSpan.FromMinutes(1));
            tree.Commit();
            Assert.AreEqual(100, tree.Count);

            // Pile on 7 threads x 5000 records, uncommitted (log-only).
            sample = new Dictionary<Guid, TestInfo>(first);
            Insert(tree, sample, 7, 5000, TimeSpan.FromMinutes(1));
            Assert.AreEqual(35100, tree.Count);

            // One full update pass to grow the log further.
            // NOTE(review): this loop runs exactly once — presumably the bound
            // was left from tuning the number of passes; confirm intent.
            for (int i = 0; i < 1; i++)
            {
                foreach (var rec in tree)
                {
                    var value = rec.Value;
                    value.UpdateCount++;
                    value.ReadCount++;
                    tree[rec.Key] = value;
                }
            }

            // Save the log, then discard everything since the last commit.
            File.Copy(options.TransactionLog.FileName, savelog.TempPath, true);
            tree.Rollback();

            TestInfo.AssertEquals(first, tree);
        }

        //file still has initial committed data
        TestInfo.AssertEquals(first, BPlusTree<Guid, TestInfo>.EnumerateFile(options));

        //restore the log and verify all data.
        File.Copy(savelog.TempPath, options.TransactionLog.FileName, true);
        using (var tree = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(sample, tree);
        }

        //file still has initial committed data
        TestInfo.AssertEquals(sample, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
    }
}
/// <summary>
/// Worker loop: continuously reads back every key in the tree and verifies the
/// stored record's MyKey matches the key it was fetched by, until stopped.
/// </summary>
void FetchStuff(BPlusTree<Guid, TestInfo> tree)
{
    while (mreStop.WaitOne(0, false) == false)
    {
        foreach (Guid key in tree.Keys)
        {
            TestInfo info;
            bool found = tree.TryGetValue(key, out info);
            // A concurrent remove may make the lookup miss; only a value that
            // is present but inconsistent is an error.
            if (found && info.MyKey != key)
                throw new ApplicationException();
        }
    }
}
/// <summary>
/// Worker loop: increments UpdateCount on every record via TryUpdate until
/// stopped. An update may fail only if the key was concurrently removed.
/// </summary>
void UpdateStuff(BPlusTree<Guid, TestInfo> tree)
{
    while (mreStop.WaitOne(0, false) == false)
    {
        foreach (var pair in tree)
        {
            if (tree.TryUpdate(pair.Key, (k, v) => { v.UpdateCount++; return v; }))
                continue;
            // Failed update with the key still present means a real bug.
            if (tree.ContainsKey(pair.Key))
                throw new ApplicationException();
        }
    }
}
/// <summary>
/// Throttles writer threads once the tree grows past 100,000 records by
/// sleeping one millisecond per record over the threshold.
/// </summary>
private void AddIdle(BPlusTree<Guid, TestInfo> tree)
{
    const int threshold = 100000;
    int size = tree.Count;
    if (size > threshold)
    {
        Thread.Sleep(size - threshold);
    }
}
/// <summary>
/// Worker loop: adds batches of 100 generated records via AddRange until the
/// shared stop event is signaled, throttling between batches.
/// </summary>
void AddRanges(BPlusTree<Guid, TestInfo> tree)
{
    while (mreStop.WaitOne(0, false) == false)
    {
        var batch = CreateData(100);
        tree.AddRange(batch);
        AddIdle(tree);
    }
}
/// <summary>
/// Verifies crash recovery on a never-created data file, and additionally that
/// the data file alone (without its transaction log) contains no records —
/// proving all state lived in the log before recovery.
/// </summary>
/// <param name="options">Tree options; must have a TransactionLog configured.</param>
/// <param name="count">Records inserted per thread before the simulated crash.</param>
/// <param name="added">Extra records appended directly to the log.</param>
// NOTE(review): an identically-signed TestRecoveryOnNew appears earlier in this
// file — presumably they belong to different test classes; confirm.
void TestRecoveryOnNew(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> data = new Dictionary<Guid, TestInfo>();
    try
    {
        Assert.IsNotNull(options.TransactionLog);
        temp.Delete();

        // Simulate a crash: dispose the private "_storage" member (reflection)
        // so nothing is committed, leaving all data only in the log.
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            using ((IDisposable)new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;

        //No data... yet...
        // Open a copy of the data file with the log disconnected: it must be empty.
        using(TempFile testempty = TempFile.FromCopy(options.FileName))
        {
            var testoptions = options.Clone();
            testoptions.TransactionLogFileName = null;
            testoptions.TransactionLog = null;
            testoptions.FileName = testempty.TempPath;

            using (var empty = new BPlusTree<Guid, TestInfo>(testoptions))
            {
                empty.EnableCount();
                Assert.AreEqual(0, empty.Count);
            }
        }

        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null) tree.Dispose();
    }
}
/// <summary>
/// Asserts that the tree's structure is valid and its contents exactly match
/// <paramref name="expected"/>: same pairs via enumeration (no extras, no
/// omissions), matching Count, and every pair retrievable by key.
/// </summary>
private static void VerifyDictionary(Dictionary<int, string> expected, BPlusTree<int, string> tree)
{
    tree.Validate();
    tree.EnableCount();

    // Work on a copy so each enumerated pair can be struck off.
    Dictionary<int, string> test = new Dictionary<int, string>(expected);
    List<KeyValuePair<int, string>> pairs = new List<KeyValuePair<int, string>>(test);

    string val;
    // Every pair the tree yields must exist (exactly once) in expected.
    foreach (KeyValuePair<int, string> pair in tree)
    {
        Assert.IsTrue(test.TryGetValue(pair.Key, out val));
        Assert.AreEqual(pair.Value, val);
        Assert.IsTrue(test.Remove(pair.Key));
    }
    // ...and the tree must not have omitted any expected pair.
    Assert.AreEqual(0, test.Count);
    // (Removed dead code: 'test = null; Assert.IsNull(test);' — assigning null
    // and immediately asserting it is a tautology with no test value.)

    // Count and point lookups must agree with the expected contents.
    Assert.AreEqual(pairs.Count, tree.Count);
    foreach (KeyValuePair<int, string> pair in pairs)
    {
        Assert.IsTrue(tree.TryGetValue(pair.Key, out val));
        Assert.AreEqual(pair.Value, val);
    }
}
/// <summary>
/// Verifies that a read-only TransactionLog can keep a second tree ("copy")
/// synchronized with the primary tree by incrementally replaying the log from
/// a tracked position after each batch of mutations.
/// </summary>
public void TestSyncFromLogging()
{
    using (var tempFile = new TempFile())
    using (var logfile = new TempFile())
    using (var tempCopy = new TempFile())
    {
        var options = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
            TransactionLogFileName = logfile.TempPath,
        }.CalcBTreeOrder(4, 10);
        // The copy uses the same layout but its own file; Fastest avoids flush overhead.
        var readcopy = options.Clone();
        readcopy.FileName = tempCopy.TempPath;
        readcopy.StoragePerformance = StoragePerformance.Fastest;

        using (var tree = new BPlusTree<int, string>(options))
        using (var copy = new BPlusTree<int, string>(readcopy))
        using (var tlog = new TransactionLog<int, string>(
            new TransactionLogOptions<int, string>(logfile.TempPath, PrimitiveSerializer.Int32, PrimitiveSerializer.String) { ReadOnly = true }))
        {
            tree.Add(0, "0");
            tree.Commit();
            // logpos tracks how far into the log we have replayed so far.
            long logpos = 0;
            copy.EnableCount();

            //start by copying the data from tree's file into the copy instance:
            copy.BulkInsert(
                BPlusTree<int, string>.EnumerateFile(options),
                new BulkInsertOptions { InputIsSorted = true, CommitOnCompletion = false, ReplaceContents = true }
                );
            Assert.AreEqual(1, copy.Count);
            Assert.AreEqual("0", copy[0]);
            // The committed record is already present; replay should be a no-op.
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1, copy.Count);

            //insert some data...
            tree.AddRange(MakeValues(1, 99));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(100, copy.Count);

            //remove the data again (replay must propagate deletes too)...
            for (int i = 0; i < 100; i++)
                tree.Remove(i);
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(0, copy.Count);

            // A larger batch: 1000 inserts replayed in one pass.
            tree.AddRange(MakeValues(1000, 1000));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1000, copy.Count);
        }
    }
}
/// <summary>
/// Verifies that BulkInsert with ReplaceContents discards the tree's prior
/// contents (1000 records) and leaves exactly the new input (100 records).
/// </summary>
public void TestReplaceContents()
{
    Dictionary<int, string> expected = new Dictionary<int, string>();
    IEnumerable<KeyValuePair<int, string>>[] inputSets =
        new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(1, 1000, expected)).ToArray();

    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(Options))
    {
        // Seed the tree with the first 1000-record set.
        var seedOptions = new BulkInsertOptions
        {
            DuplicateHandling = DuplicateHandling.LastValueWins,
            InputIsSorted = true
        };
        tree.BulkInsert(new OrderedKeyValuePairs<int, string>(inputSets[0]), seedOptions);
        VerifyDictionary(expected, tree);

        // Use bulk insert to overwrite the contents of tree.
        expected = new Dictionary<int, string>();
        inputSets = new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(1, 100, expected)).ToArray();

        var replaceOptions = new BulkInsertOptions
        {
            CommitOnCompletion = false,
            InputIsSorted = true,
            ReplaceContents = true,
            DuplicateHandling = DuplicateHandling.RaisesException,
        };
        tree.BulkInsert(new OrderedKeyValuePairs<int, string>(inputSets[0]), replaceOptions);
        VerifyDictionary(expected, tree);
    }
}
/// <summary>
/// Runs the random-merge test against a file-backed tree, then reopens the
/// same file read-from-disk and verifies every record survived the round trip.
/// </summary>
public void TestMergeRandomInFile()
{
    BPlusTreeOptions<int, string> treeOptions = Options;
    using (TempFile tempFile = new TempFile())
    {
        tempFile.Delete();
        treeOptions.CreateFile = CreatePolicy.Always;
        treeOptions.FileName = tempFile.TempPath;
        treeOptions.CalcBTreeOrder(4, 4);

        // Build the file via random merges and time it.
        Stopwatch timer = Stopwatch.StartNew();
        Dictionary<int, string> expected = TestMergeRandom(treeOptions, 2, 300);
        Trace.TraceInformation("Creating {0} nodes in {1}.", expected.Count, timer.Elapsed);

        // Reopen the existing file (no create) and verify its contents.
        timer = Stopwatch.StartNew();
        treeOptions = Options;
        treeOptions.CreateFile = CreatePolicy.Never;
        treeOptions.FileName = tempFile.TempPath;
        treeOptions.CalcBTreeOrder(4, 4);

        using (BPlusTree<int, string> tree = new BPlusTree<int, string>(treeOptions))
        {
            VerifyDictionary(expected, tree);
        }
        Trace.TraceInformation("Verified {0} nodes in {1}.", expected.Count, timer.Elapsed);
    }
}
/// <summary>
/// Bulk-inserts <paramref name="nsets"/> random sets of <paramref name="nsize"/>
/// records (last value wins on duplicates), verifies the merged result both
/// before and after unloading the cache, and returns the expected contents.
/// </summary>
public Dictionary<int, string> TestMergeRandom(BPlusTreeOptions<int, string> options, int nsets, int nsize)
{
    Dictionary<int, string> expected = new Dictionary<int, string>();
    IEnumerable<KeyValuePair<int, string>>[] inputSets =
        new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(nsets, nsize, expected)).ToArray();

    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(options))
    {
        var mergeOptions = new BulkInsertOptions { DuplicateHandling = DuplicateHandling.LastValueWins };
        foreach (IEnumerable<KeyValuePair<int, string>> set in inputSets)
        {
            tree.BulkInsert(set, mergeOptions);
        }
        VerifyDictionary(expected, tree);

        // Drop the in-memory cache, force one mutation, and re-verify from storage.
        tree.UnloadCache();
        tree.Add(int.MaxValue, "max");
        tree.Remove(int.MaxValue);
        VerifyDictionary(expected, tree);
    }

    return expected;
}
/// <summary>
/// Verifies that a single sorted bulk insert of 1000 records produces a tree
/// exactly matching the generated data.
/// </summary>
public void TestBulkInsertSorted()
{
    Dictionary<int, string> expected = new Dictionary<int, string>();
    IEnumerable<KeyValuePair<int, string>>[] inputSets =
        new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(1, 1000, expected)).ToArray();

    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(Options))
    {
        var insertOptions = new BulkInsertOptions
        {
            DuplicateHandling = DuplicateHandling.LastValueWins,
            InputIsSorted = true
        };
        tree.BulkInsert(new OrderedKeyValuePairs<int, string>(inputSets[0]), insertOptions);
        VerifyDictionary(expected, tree);
    }
}
/// <summary>
/// Bulk-inserts a sequence of keys 1..count (value = key-1 as text) and
/// verifies that BulkInsert reports the full count and the contents match.
/// </summary>
void TestMergeSequenceInFile(BPlusTreeOptions<int, string> options, int count)
{
    Dictionary<int, string> expected = new Dictionary<int, string>();
    int index = 0;
    while (index < count)
    {
        expected.Add(index + 1, index.ToString());
        index++;
    }

    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(options))
    {
        // BulkInsert returns the number of records written.
        Assert.AreEqual(expected.Count, tree.BulkInsert(expected));
        VerifyDictionary(expected, tree);
    }
}