/// <summary>Verifies Commit/Rollback semantics: rollback discards uncommitted changes, commit makes them durable.</summary>
public void TestCommitRollback()
{
    using (BPlusTree<int, string> tree = Create(Options))
    {
        tree.EnableCount();

        // Commit/Rollback on an empty tree are no-ops.
        Assert.AreEqual(0, tree.Count);
        tree.Rollback();
        Assert.AreEqual(0, tree.Count);
        tree.Commit();
        Assert.AreEqual(0, tree.Count);

        // An uncommitted add is discarded by rollback.
        tree.Add(1, "A");
        tree.Rollback();
        Assert.AreEqual(0, tree.Count);
        tree.Commit();
        Assert.AreEqual(0, tree.Count);

        // A committed add survives a subsequent rollback.
        tree.Add(1, "A");
        tree.Commit();
        Assert.AreEqual(1, tree.Count);
        tree.Rollback();
        Assert.AreEqual(1, tree.Count);

        // Rollback removes only the uncommitted entry.
        tree.Add(2, "B");
        tree.Rollback();
        Assert.AreEqual(1, tree.Count);

        // A committed update is preserved across rollback.
        tree[1] = "abc";
        tree.Commit();
        Assert.AreEqual(1, tree.Count);
        tree.Rollback();
        Assert.AreEqual("abc", tree[1]);
        Assert.IsFalse(tree.ContainsKey(2));
    }
}
/// <summary>Writes three entries, reopens the file read-only, and verifies reads succeed while writes throw.</summary>
public void TestReadOnly()
{
    using (TempFile file = new TempFile())
    {
        var opt = new BPlusTree<int, int>.Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
        {
            CreateFile = CreatePolicy.Always,
            FileName = file.TempPath,
        };

        // Populate the tree and dispose it so the data is flushed to disk.
        using (BPlusTree<int, int> tree = new BPlusTree<int, int>(opt))
        {
            tree.Add(1, 2);
            tree.Add(3, 4);
            tree.Add(5, 6);
        }

        // Reopen the same file in read-only mode.
        opt.CreateFile = CreatePolicy.Never;
        opt.ReadOnly = true;

        using (BPlusTree<int, int> tree = new BPlusTree<int, int>(opt))
        {
            Assert.AreEqual(tree[1], 2);
            Assert.AreEqual(tree[3], 4);
            Assert.AreEqual(tree[5], 6);

            // Any mutation must fail with InvalidOperationException.
            try { tree[1] = 0; Assert.Fail(); }
            catch (InvalidOperationException) { }
            try { tree.Remove(1); Assert.Fail(); }
            catch (InvalidOperationException) { }
        }
    }
}
/// <summary>Opening an existing file with ReadOnly = true must permit lookups and reject all mutations.</summary>
public void TestReadOnly()
{
    using (TempFile tempFile = new TempFile())
    {
        var options = new BPlusTree<int, int>.Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
        };

        // Write phase: create the file with three entries.
        using (BPlusTree<int, int> writer = new BPlusTree<int, int>(options))
        {
            writer.Add(1, 2);
            writer.Add(3, 4);
            writer.Add(5, 6);
        }

        options.CreateFile = CreatePolicy.Never;
        options.ReadOnly = true;

        // Read phase: values are visible; indexer-set and Remove must throw.
        using (BPlusTree<int, int> reader = new BPlusTree<int, int>(options))
        {
            Assert.AreEqual(reader[1], 2);
            Assert.AreEqual(reader[3], 4);
            Assert.AreEqual(reader[5], 6);

            try
            {
                reader[1] = 0;
                Assert.Fail();
            }
            catch (InvalidOperationException) { }

            try
            {
                reader.Remove(1);
                Assert.Fail();
            }
            catch (InvalidOperationException) { }
        }
    }
}
/// <summary>
/// Rebuilds the in-memory key index by scanning the record log in the memory-mapped block.
/// Record layout (as read here): 1-byte deleted flag, 3 skipped bytes (presumably
/// padding/reserved — TODO confirm), 4-byte payload length, 32-byte key, then the payload.
/// </summary>
void CreateIndex()
{
    long offset = 0;
    // Nothing has been written yet; no records to index.
    if (mh.WriteOffset == 0) { return; }
    using (MemoryMappedViewStream maData = mmfBlock.CreateViewStream(reservedBytes, mh.WriteOffset))
    {
        using (System.IO.BinaryReader br = new BinaryReader(maData))
        {
            // Walk records until the stream position reaches the write cursor.
            while ((offset = br.BaseStream.Position) != mh.WriteOffset)
            {
                bool Deleted = br.ReadBoolean();
                br.BaseStream.Seek(3, SeekOrigin.Current); // skip 3 bytes after the flag
                int Length = br.ReadInt32();
                if (!Deleted)
                {
                    byte[] Key = br.ReadBytes(32); // fixed-width 32-byte key
                    index.Add(Key, offset);        // index maps key -> record start offset
                    br.BaseStream.Seek(Length, SeekOrigin.Current); // skip the payload
                }
                else
                {
                    // Deleted record: skip key (32 bytes) and payload together.
                    br.BaseStream.Seek(Length + 32, SeekOrigin.Current);
                }
            }
        }
    }
}
/// <summary>Renames an entry: the source must exist and the destination must not already exist.</summary>
public void Rename(string source, string dest)
{
    ContentRecord record;
    Check.Assert<ArgumentException>(_index.TryGetValue(source, out record), "The source was not found.");

    // Rebuild the record with the new URI, insert under the new name, then drop the old one.
    record = record.ToBuilder().SetContentUri(dest).Build();
    Check.Assert<ArgumentException>(_index.Add(dest, record), "The target already exists.");
    _index.Remove(source);
}
/// <summary>Queues a removal of (value, rowId) in the transition tree and registers it for commit.</summary>
public void Remove(AttributeValue value, long rowId, long operationId)
{
    if (_tree.IsDisposed)
        return;

    var removeOp = new IndexRemoveOp<long>(value, rowId);
    _transitionTree.Add(value, removeOp);
    BoundCheck(value);
    AddToCommitOps(operationId, removeOp);
}
/// <summary>Verifies an open enumerator tolerates concurrent inserts/removes and yields the surviving keys in order.</summary>
public void TestEnumeration()
{
    var options = Options;
    options.BTreeOrder = 4;
    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(options))
    {
        tree.EnableCount();
        tree.DebugSetOutput(new StringWriter());
        tree.DebugSetValidateOnCheckpoint(true);

        for (int key = 0; key < 10; key++)
            tree.Add(key, key.ToString());

        using (IEnumerator<KeyValuePair<int, string>> cursor = tree.GetEnumerator())
        {
            Assert.IsTrue(cursor.MoveNext());
            Assert.AreEqual(0, cursor.Current.Key);

            // Mutate the tree while the enumerator is open.
            for (int key = 2; key < 10; key++)
                Assert.IsTrue(tree.Remove(key));
            for (int key = 6; key < 11; key++)
                tree.Add(key, key.ToString());

            Assert.IsTrue(cursor.MoveNext());
            Assert.AreEqual(1, cursor.Current.Key);
            Assert.IsTrue(cursor.MoveNext());
            Assert.AreEqual(6, cursor.Current.Key);
            Assert.IsTrue(cursor.MoveNext());
            Assert.AreEqual(7, cursor.Current.Key);

            // Remove the next keys ahead of the cursor, then add a later one.
            Assert.IsTrue(tree.Remove(8));
            Assert.IsTrue(tree.Remove(9));
            Assert.IsTrue(tree.Remove(10));
            tree.Add(11, 11.ToString());

            Assert.IsTrue(cursor.MoveNext());
            Assert.AreEqual(11, cursor.Current.Key);
            Assert.IsFalse(cursor.MoveNext());
        }
        tree.Clear();
    }
}
/// <summary>
/// A test for Remove: removing a value that was never added must still leave Contains
/// with a non-null search path.
/// </summary>
public void RemoveTestHelper<T>()
{
    const int degree = 2;
    BPlusTree<double> target = new BPlusTree<double>(degree);
    double missing = 6;

    foreach (double value in new double[] { 1, 2, 3, 4, 5 })
        target.Add(value);

    target.Remove(missing);
    SearchResult result = target.Contains(missing);
    Assert.IsNotNull(result.SearchPath);
}
/// <summary>Compares the element count of a LINQ Where over Samples with the tree's GtAndLt range query.</summary>
public void Count_of_filtered_is_equals_to_count_of_linqWhered()
{
    var start = new DateTime(2016, 1, 1);
    var end = new DateTime(2016, 1, 8);
    var timer = new Stopwatch();

    timer.Start();
    var expected = Samples.Where(s => start < s.Value.DateTime && s.Value.DateTime < end).ToArray();
    timer.Stop();
    Log($"linq whered count={expected.Length}, time={timer.ElapsedMilliseconds}[ms]");
    timer.Reset();

    // Index every sample by its timestamp.
    var index = new BPlusTree<DateTime, long>();
    foreach (var sample in Samples)
        index.Add(sample.Value.DateTime, sample.Key);

    timer.Start();
    var actual = index.GtAndLt(start, end).Select(id => Samples[id]).ToArray();
    timer.Stop();
    Log($"filtered count={actual.Length}, time={timer.ElapsedMilliseconds}[ms]");
    timer.Reset();

    Assert.AreEqual(expected.Length, actual.Length);
}
/// <summary>Demonstrates BPlusTree usage: index temp-directory files by last write time, then reopen and diff.</summary>
public void BPlusTreeDemo()
{
    var options = new BPlusTree<string, DateTime>.OptionsV2(PrimitiveSerializer.String, PrimitiveSerializer.DateTime);
    options.CalcBTreeOrder(16, 24);
    options.CreateFile = CreatePolicy.Always;
    options.FileName = Path.GetTempFileName();

    // First pass: record every file's last write time (UTC).
    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        var tempDir = new DirectoryInfo(Path.GetTempPath());
        foreach (var file in tempDir.GetFiles("*", SearchOption.AllDirectories))
            tree.Add(file.FullName, file.LastWriteTimeUtc);
    }

    // Second pass: reopen the existing file and report differences.
    options.CreateFile = CreatePolicy.Never;
    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        var tempDir = new DirectoryInfo(Path.GetTempPath());
        foreach (var file in tempDir.GetFiles("*", SearchOption.AllDirectories))
        {
            DateTime recorded;
            if (!tree.TryGetValue(file.FullName, out recorded))
                Console.WriteLine("New file: {0}", file.FullName);
            else if (recorded != file.LastWriteTimeUtc)
                Console.WriteLine("Modified: {0}", file.FullName);
            tree.Remove(file.FullName);
        }

        // Whatever is still in the tree was not seen on disk this pass.
        foreach (var item in tree)
            Console.WriteLine("Removed: {0}", item.Key);
    }
}
/// <summary>Bulk-merges random sets with LastValueWins and verifies the tree matches the reference dictionary.</summary>
public Dictionary<int, string> TestMergeRandom(BPlusTreeOptions<int, string> options, int nsets, int nsize)
{
    Dictionary<int, string> expected = new Dictionary<int, string>();
    IEnumerable<KeyValuePair<int, string>>[] sets =
        new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(nsets, nsize, expected)).ToArray();

    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(options))
    {
        foreach (IEnumerable<KeyValuePair<int, string>> set in sets)
        {
            tree.BulkInsert(set, new BulkInsertOptions { DuplicateHandling = DuplicateHandling.LastValueWins });
        }
        VerifyDictionary(expected, tree);

        // Force a reload from storage, touch the tree, and re-verify.
        tree.UnloadCache();
        tree.Add(int.MaxValue, "max");
        tree.Remove(int.MaxValue);
        VerifyDictionary(expected, tree);
    }
    return expected;
}
/// <summary>GetRange must return the same elements as the corresponding slice of ToArray().</summary>
public void GatRangeTest()
{
    const int limit = 100;
    var rnd = new Random();
    var vals = new List<int>();
    for (int i = 0; i < limit; ++i)
        vals.Add(rnd.Next(limit));

    var tree = new BPlusTree<int>(4);
    foreach (var value in vals)
        tree.Add(value);

    // Pick a random window [first, first + length) within the tree.
    var snapshot = tree.ToArray();
    var length = rnd.Next(tree.Count - 1);
    var first = rnd.Next(tree.Count - length);
    var range = tree.GetRange(first, length);
    for (int i = 0; i < length; ++i)
        Assert.AreEqual(range[i], snapshot[first + i]);
}
/// <summary>Cross-checks the tree's search variants (Gt/Ge/Lt/Le) against linear searches over a sorted list.</summary>
public void SearchTest2()
{
    var rnd = new Random();
    var tree = new BPlusTree<int>(2);
    var list = new List<int>();
    var limit = 40;

    // Values are drawn from [0, limit/2), so duplicates are expected.
    for (int i = 0; i < limit; ++i)
    {
        var value = rnd.Next(limit / 2);
        tree.Add(value);
        list.Add(value);
    }
    list.Sort();

    for (int i = 0; i < limit; ++i)
    {
        Assert.AreEqual(tree.SearchOfGreter(i).CurrentIndex, this.SearchGt(list, i));
        Assert.AreEqual(tree.SearchOfGreterEqual(i).CurrentIndex, this.SearchGe(list, i));
        Assert.AreEqual(tree.SearchOfLess(i).CurrentIndex, this.SearchLt(list, i));
        Assert.AreEqual(tree.SearchOfLessEqual(i).CurrentIndex, this.SearchLe(list, i));
    }
}
/// <summary>RemoveAt on the tree must mirror List&lt;T&gt;.RemoveAt for random indices.</summary>
public void RemoveAtTest()
{
    const int limit = 1000;
    var rnd = new Random();
    var tree = new BPlusTree<int>(4);
    for (int i = 0; i < limit; ++i)
        tree.Add(i);

    // Keep a plain list in lock-step with the tree.
    var mirror = new List<int>(tree);
    for (int i = 0; i < limit; ++i)
    {
        var victim = rnd.Next(tree.Count);
        tree.RemoveAt(victim);
        mirror.RemoveAt(victim);
        for (int j = 0; j < tree.Count; ++j)
            Assert.AreEqual(tree[j], mirror[j]);
    }
}
/// <summary>ToArray must agree element-by-element with the tree's indexer and report the same length.</summary>
public void ToArrayTest()
{
    const int limit = 100;
    var rnd = new Random();
    var vals = new List<int>();
    for (int i = 0; i < limit; ++i)
        vals.Add(rnd.Next(limit));

    var tree = new BPlusTree<int>(4);
    foreach (var value in vals)
        tree.Add(value);

    var snapshot = tree.ToArray();
    for (int i = 0; i < snapshot.Length; ++i)
        Assert.AreEqual(tree[i], snapshot[i]);
    Assert.AreEqual(tree.Count, snapshot.Length);
}
/// <summary>Exercises BulkInsert duplicate handling: with FirstValueWins the pre-existing entry is kept.</summary>
public static void Test_DuplicateHandelingOptions()
{
    var options = new BPlusTree<double, string>.OptionsV2(PrimitiveSerializer.Double, PrimitiveSerializer.String);
    options.CalcBTreeOrder(16, 24);
    options.FileBlockSize = 8192;
    options.CreateFile = CreatePolicy.Always;
    options.FileName = "I:\\test.tmp";

    var opts = new BulkInsertOptions
    {
        CommitOnCompletion = true, // check how to properly set this value using Roger examples.
        DuplicateHandling = DuplicateHandling.FirstValueWins,
        InputIsSorted = true,
        ReplaceContents = false,
    };

    var sortedContent = new SortedDictionary<double, string>();
    sortedContent.Add(10.0, "Demah");

    using (var tree = new BPlusTree<double, string>(options))
    {
        // Key 10.0 already exists, so the bulk insert's value must lose.
        tree.Add(10.0, "Hamed");
        tree.BulkInsert(sortedContent, opts);
    }
}
/// <summary>Replaces a source record: deletes the old entry, re-indexes its id, and appends the new data to the file.</summary>
public void Edit(SourceEntity source)
{
    if (source.Title == null || source.Type == null)
        throw new ArgumentNullException();

    Delete(source.Source_id);

    options.CalcBTreeOrder(4, 4);
    options.CreateFile = CreatePolicy.Never;
    options.FileName = Path.GetFileName("C:/Users/Дмитрий/Desktop/DMDProject/DMDProject/TreeSource");

    using (FileStream fs = new FileStream("C:/Users/Дмитрий/Desktop/DMDProject/DMDProject/Source", FileMode.OpenOrCreate))
    {
        // Record where the new payload will start (current end of file) in the index tree.
        using (var tree = new BPlusTree<int, int>(options))
        {
            tree.Add(Convert.ToInt32(source.Source_id), (int)fs.Length);
        }

        // Append the record at the end of the data file.
        fs.Seek((int)fs.Length, 0);
        using (BinaryWriter writer = new BinaryWriter(fs))
        {
            writer.Write(Convert.ToInt32(source.Source_id));
            writer.Write(source.Title);
            writer.Write(source.Type);
        }
    }
}
/// <summary>
/// Journal operation: writes the object payload into a freshly allocated block stream and
/// inserts — or, if an entry for this index already exists, replaces — its ObjectInfo in the tree.
/// </summary>
public void Execute(uint stage, SharpMedia.Database.Physical.Journalling.IService service)
{
    Debug.Assert(stage == 0); // single-stage operation

    // 1) We first write to allocated block stream.
    BlockStream stream = service.AllocationContext.CreateBlockStream((ulong)objectData.LongLength);
    stream.Write(objectData);

    // 2) We may need to delete object at index.
    ObjectInfo data = tree.Find(service, index);
    if (data != null)
    {
        // Deallocate link.
        BlockStream stream2 = BlockStream.FromBase(data.Address, service);
        stream2.Deallocate();

        // We replace the entry (cheaper than delete and rewrite).
        tree.Replace(service, new ObjectInfo(index, (ulong)objectData.LongLength, stream.BaseAddress));
    }
    else
    {
        // 3) We execute insert operation.
        tree.Add(service, new ObjectInfo(index, (ulong)objectData.LongLength, stream.BaseAddress));
    }
}
} //lerCSV()

/// <summary>
/// Creates the B+ tree from the data read from the CSV.
/// </summary>
/// <param name="dicionario">Data read from the CSV</param>
/// <returns>false on error (the tree file already exists), true on success</returns>
public static bool criaArvore(Dictionary<int, DadosOcorrencia> dicionario)
{
    // Serializer for the values written into the tree.
    ProtoNetSerializer<DadosOcorrencia> serializer = new ProtoNetSerializer<DadosOcorrencia>();

    // Configure the tree options.
    var tree_options = new BPlusTree<int, DadosOcorrencia>.OptionsV2(PrimitiveSerializer.Int32, serializer);
    tree_options.CalcBTreeOrder(8, 30);
    tree_options.CreateFile = CreatePolicy.IfNeeded;
    tree_options.FileName = path_btree;

    // Error: the tree file already exists.
    if (File.Exists(path_btree))
        return false;

    using (var tree = new BPlusTree<int, DadosOcorrencia>(tree_options))
    {
        // Walk the dictionary and add every entry to the tree.
        foreach (KeyValuePair<int, DadosOcorrencia> entry in dicionario)
            tree.Add(entry.Key, entry.Value);
    }
    return true;
}
/// <summary>EnumerateRange must clamp to the existing keys and yield them in ascending order.</summary>
public void TestRangeEnumerate()
{
    using (BPlusTree<int, string> data = Create(Options))
    {
        for (int i = 0; i < 100; i++)
            data.Add(i, i.ToString());

        // A range far wider than the key space yields every entry once.
        int expected = 0;
        foreach (KeyValuePair<int, string> kv in data.EnumerateRange(-500, 5000))
            Assert.AreEqual(expected++, kv.Key);
        Assert.AreEqual(100, expected);

        // Inclusive sub-ranges: each dictionary pair maps start -> end.
        foreach (KeyValuePair<int, int> range in new Dictionary<int, int> { { 6, 25 }, { 7, 25 }, { 8, 25 }, { 9, 25 }, { 22, 25 }, { 28, 28 } })
        {
            expected = range.Key;
            foreach (KeyValuePair<int, string> kv in data.EnumerateRange(expected, range.Value))
                Assert.AreEqual(expected++, kv.Key);
            Assert.AreEqual(range.Value, expected - 1);
        }
    }
}
/// <summary>
/// Journal operation that renames a child: removes <c>prevName</c> from its hash bucket's
/// child tag, then inserts <c>newName</c> into the (possibly different) bucket for its hash.
/// </summary>
public void Execute(uint stage, SharpMedia.Database.Physical.Journalling.IService service)
{
    // 1) We read previous object placement and change it.
    ObjectInfo info = childrenTree.Find(service, (uint)prevName.GetHashCode());
    BlockStream stream = BlockStream.FromBase(info.Address, service);
    ChildTag childTag = Common.DeserializeFromArray(stream.Read(info.Size)) as ChildTag;
    childTag.Remove(prevName);

    // Remove it if empty.
    if (childTag.IsEmpty)
    {
        childrenTree.Remove(service, (uint)prevName.GetHashCode(), 1, false);
    }
    else
    {
        // Update the entry (now without this child).
        byte[] childTagData = Common.SerializeToArray(childTag);
        stream = service.AllocationContext.CreateBlockStream((ulong)childTagData.LongLength);
        stream.Write(childTagData);
        childrenTree.Replace(service, new ObjectInfo((uint)prevName.GetHashCode(), (ulong)childTagData.LongLength, stream.BaseAddress));
    }

    // 3) We create new and insert it into tree.
    ObjectInfo info2 = childrenTree.Find(service, (uint)newName.GetHashCode());
    if (info2 == null)
    {
        // No bucket for this hash yet: create a fresh tag holding only the new name.
        childTag = new ChildTag();
        childTag.Add(newName, info.Address);
        byte[] childTagData = Common.SerializeToArray(childTag);
        stream = service.AllocationContext.CreateBlockStream((ulong)childTagData.LongLength);
        stream.Write(childTagData);

        // And we add child.
        childrenTree.Add(service, new ObjectInfo((uint)newName.GetHashCode(), (ulong)childTagData.LongLength, stream.BaseAddress));
    }
    else
    {
        // Existing bucket: release the previous tag block, append, and rewrite.
        stream = BlockStream.FromBase(info2.Address, service);
        childTag = Common.DeserializeFromArray(stream.Read(info2.Size)) as ChildTag;
        stream.Deallocate();

        // We modify and rewrite it.
        childTag.Add(newName, info.Address);
        byte[] childTagData = Common.SerializeToArray(childTag);
        stream = service.AllocationContext.CreateBlockStream((ulong)childTagData.LongLength);
        stream.Write(childTagData);

        // BUG FIX: the replacement entry must point at the newly written tag block
        // (stream.BaseAddress), not at the prevName bucket address (info.Address).
        // This matches the Add in the branch above and the Replace earlier in this
        // method; the old code orphaned the new tag data and left the index pointing
        // at an unrelated (and possibly deallocated) block.
        childrenTree.Replace(service, new ObjectInfo((uint)newName.GetHashCode(), (ulong)childTagData.LongLength, stream.BaseAddress));
    }
}
/// <summary>Adds a key/filename pair to the tree, reads it straight back, and loads the referenced XML file.</summary>
public void AddDataTree(string key, string fName, BPlusTree<string, String>.OptionsV2 treeData)
{
    using (var tree = new BPlusTree<string, String>(treeData))
    {
        tree.Add(key, fName);

        // Read the stored filename back and hand it to the XML reader.
        tree.TryGetValue(key, out string storedName);
        var reader = new XMLRead();
        reader.ReadXmlData(storedName);
    }
}
/// <summary>
/// Adds sale information to the index.
/// </summary>
/// <param name="saleEvent">The sale event; must not be null.</param>
public void Add(SaleEvent saleEvent)
{
    if (saleEvent == null)
        throw new ArgumentNullException(nameof(saleEvent));

    var entry = new IndexValue()
    {
        Article = saleEvent.Article,
        Store = saleEvent.StoreName,
        Count = saleEvent.Count,
    };
    bTree.Add(saleEvent.DateTime, entry);

    // Persist immediately so the entry survives a crash.
    bTree.Commit();
}
/// <summary>Stress test: repeatedly add/remove random keys, validating the tree against a reference dictionary.</summary>
void TestRandomAddRemove(int repeat, int nodesz, int size)
{
    List<int> enumerated = new List<int>(250000);
    BPlusTreeOptions<int, string> options = Options;
    options.LockingFactory = new IgnoreLockFactory();

    Dictionary<int, string> expected = new Dictionary<int, string>();
    for (; repeat > 0; repeat--)
    {
        expected.Clear();
        options.BTreeOrder = nodesz;
        using (BPlusTree<int, string> data = Create(options))
        {
            data.EnableCount();
            AddRandomKeys(size, expected, data);
            IsSameList(expected, data);
            enumerated.Clear();

            for (int tc = 0; tc < 1; tc++)
            {
                // Remove and re-add roughly a third to two-thirds of the keys.
                int del = expected.Count / 3 + Random.Next(expected.Count / 3);
                RemoveRandomKeys(del, expected, data);
                IsSameList(expected, data);
                data.Validate();
                AddRandomKeys(del, expected, data);
                IsSameList(expected, data);
                data.Validate();
            }

            // Snapshot the keys by enumeration, then churn each one.
            enumerated.Clear();
            foreach (KeyValuePair<int, string> kv in data)
                enumerated.Add(kv.Key);

            foreach (int k in enumerated)
            {
                Assert.IsTrue(data.Remove(k));
                data.Add(k, k.ToString());
                Assert.IsTrue(data.Remove(k));
                string value;
                Assert.IsFalse(data.TryGetValue(k, out value));
                Assert.IsNull(value);
            }
        }
    }
}
/// <summary>Builds a BPlusTree with <paramref name="count"/> generated pairs and exposes its key collection.</summary>
protected override ICollection<string> GenericICollectionFactory(int count)
{
    var tree = new BPlusTree<string, string>();
    var seed = 13453;
    for (var i = 0; i < count; i++)
        tree.Add(CreateT(seed++), CreateT(seed++));
    return tree.Keys;
}
/// <summary>Worker loop: keeps inserting batches of 100 random pairs until the stop event is signaled.</summary>
void AddStuff(BPlusTree<Guid, TestInfo> tree)
{
    while (!mreStop.WaitOne(0, false))
    {
        foreach (var pair in CreateData(100))
            tree.Add(pair.Key, pair.Value);
        AddIdle(tree);
    }
}
/// <summary>Fills a tree up to the requested size and obtains an enumerator over its key collection.</summary>
public void Dictionary_Generic_KeyCollection_GetEnumerator(int count)
{
    var tree = new BPlusTree<string, string>();
    var seed = 13453;
    while (tree.Count < count)
        tree.Add(CreateT(seed++), CreateT(seed++));
    tree.Keys.GetEnumerator();
}
/// <summary>Inserts big-endian int keys in order and verifies count and full key/value sequence at 200 and 200,000 entries.</summary>
public void TestAddMany()
{
    var tree = new BPlusTree<string>(100);

    // Big-endian encoding keeps byte-wise key order equal to numeric order.
    Func<int, byte[]> keyOf = i => BitConverter.GetBytes(i).Reverse().ToArray();

    for (int i = 0; i < 200; i++)
        tree.Add(keyOf(i), i.ToString());
    Assert.IsTrue(tree.Count == 200);
    Assert.IsTrue(KVPSequenceEqual(
        Enumerable.Range(0, 200).Select(i => new KeyValuePair<byte[], string>(keyOf(i), i.ToString())),
        tree));

    for (int i = 200; i < 200000; i++)
        tree.Add(keyOf(i), i.ToString());
    Assert.IsTrue(tree.Count == 200000);
    Assert.IsTrue(KVPSequenceEqual(
        Enumerable.Range(0, 200000).Select(i => new KeyValuePair<byte[], string>(keyOf(i), i.ToString())),
        tree));
}
/// <summary>Verifies that exceeding TransactionLogLimit (30 bytes) triggers an implicit commit.</summary>
public void TestAutoCommit()
{
    var options = (BPlusTree<int, string>.OptionsV2)Options;
    options.TransactionLogLimit = 30;

    using (BPlusTree<int, string> tree = Create(options))
    {
        tree.EnableCount();
        Assert.AreEqual(0, tree.Count);

        // A single small write stays below the limit and is rolled back.
        tree.Add(1, "A");
        tree.Rollback();
        Assert.AreEqual(0, tree.Count);

        tree.Add(1, "A");
        tree.Add(2, "B"); //The second write exceeds 30 bytes and auto-commits
        tree.Rollback();
        Assert.AreEqual(2, tree.Count);

        tree.Add(3, "C");
        tree.Add(4, "D"); //The second write will commit, but not the last
        tree.Add(5, "E");
        tree.Rollback();
        Assert.AreEqual(4, tree.Count);
        Assert.IsFalse(tree.ContainsKey(5));
    }
}
/// <summary>Applies all queued index operations for the given transaction to the backing tree.</summary>
public void PersistOperation(long operationId)
{
    IList<IndexOp<long>> pending;
    if (!opsToCommit.TryGetValue(operationId, out pending))
        return;

    foreach (var op in pending)
    {
        switch (op.OperationType)
        {
            case OpType.Insert:
                _tree.Add(op.Key, op.RowId);
                break;
            case OpType.Remove:
                _tree.TryRemove(op.Key, op.RowId);
                break;
        }
    }

    // Record that this transaction's operations are now durable.
    appliedOps.Add(operationId);
}
/// <summary>First()/Last() must track the smallest/largest keys as entries are removed, and throw once empty.</summary>
public void TestFirstAndLast()
{
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.Add(1, "a");
        data.Add(2, "b");
        data.Add(3, "c");
        data.Add(4, "d");
        data.Add(5, "e");

        Assert.AreEqual(1, data.First().Key);
        Assert.AreEqual("a", data.First().Value);
        data.Remove(1);
        Assert.AreEqual(2, data.First().Key);
        Assert.AreEqual("b", data.First().Value);

        Assert.AreEqual(5, data.Last().Key);
        Assert.AreEqual("e", data.Last().Value);
        data.Remove(5);
        Assert.AreEqual(4, data.Last().Key);
        Assert.AreEqual("d", data.Last().Value);

        data.Remove(4);
        data.Remove(3);
        KeyValuePair<int, string> kv;
        Assert.IsTrue(data.TryGetLast(out kv));
        Assert.IsTrue(data.TryGetFirst(out kv));

        // Once the final entry is gone the Try variants fail and First/Last throw.
        data.Remove(2);
        Assert.IsFalse(data.TryGetLast(out kv));
        Assert.IsFalse(data.TryGetFirst(out kv));

        try
        {
            data.First();
            Assert.Fail("Should raise InvalidOperationException");
        }
        catch (InvalidOperationException) { }

        try
        {
            data.Last();
            Assert.Fail("Should raise InvalidOperationException");
        }
        catch (InvalidOperationException) { }
    }
}
/// <summary>Round-trips custom-serialized keys/values through a Version1 file and reopens it for reads.</summary>
public void TestCommonConfiguration()
{
    BPlusTree<KeyInfo, DataValue>.Options options =
        new BPlusTree<KeyInfo, DataValue>.Options(new KeyInfoSerializer(), new DataValueSerializer(), new KeyInfoComparer());
    options.CalcBTreeOrder(32, 300); //we can simply just guess close
    options.FileName = TempFile.TempPath;
    options.CreateFile = CreatePolicy.Always; //obviously this is just for testing
    Assert.AreEqual(FileVersion.Version1, options.FileVersion);

    Random rand = new Random();
    KeyInfo k1 = new KeyInfo(), k2 = new KeyInfo();

    using (BPlusTree<KeyInfo, DataValue> tree = new BPlusTree<KeyInfo, DataValue>(options))
    {
        byte[] payload = new byte[255];

        rand.NextBytes(payload);
        tree.Add(k1, new DataValue(k1, payload));
        Assert.IsTrue(tree.ContainsKey(k1));
        Assert.IsFalse(tree.ContainsKey(k1.Next()));
        Assert.AreEqual(payload, tree[k1].Bytes);

        rand.NextBytes(payload);
        tree.Add(k2, new DataValue(k2, payload));
        Assert.IsTrue(tree.ContainsKey(k2));
        Assert.IsFalse(tree.ContainsKey(k2.Next()));
        Assert.AreEqual(payload, tree[k2].Bytes);
    }

    // Reopen the existing file and confirm both keys persisted.
    options.CreateFile = CreatePolicy.Never;
    using (BPlusTree<KeyInfo, DataValue> tree = new BPlusTree<KeyInfo, DataValue>(options))
    {
        Assert.IsTrue(tree.ContainsKey(k1));
        Assert.IsTrue(tree.ContainsKey(k2));
    }
}
/// <summary>
/// Bulk-inserts several randomly generated sets (last duplicate wins) and checks the
/// resulting tree against the accumulated reference dictionary, before and after a cache unload.
/// </summary>
public Dictionary<int, string> TestMergeRandom(BPlusTreeOptions<int, string> options, int nsets, int nsize)
{
    var test = new Dictionary<int, string>();
    var setList = new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(nsets, nsize, test));
    var sets = setList.ToArray();

    using (var tree = new BPlusTree<int, string>(options))
    {
        foreach (var set in sets)
        {
            tree.BulkInsert(set, new BulkInsertOptions { DuplicateHandling = DuplicateHandling.LastValueWins });
        }
        VerifyDictionary(test, tree);

        tree.UnloadCache();

        // Touch the tree after the unload and re-verify.
        tree.Add(int.MaxValue, "max");
        tree.Remove(int.MaxValue);
        VerifyDictionary(test, tree);
    }
    return test;
}
/// <summary>
/// Verifies that a replica tree ("copy") can be kept in sync with the primary by replaying
/// its transaction log: bulk load from the primary's data file, then replay after adds and
/// removes, checking counts at each step.
/// </summary>
public void TestSyncFromLogging()
{
    using (var tempFile = new TempFile())
    using (var logfile = new TempFile())
    using (var tempCopy = new TempFile())
    {
        var options = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
            TransactionLogFileName = logfile.TempPath,
        }.CalcBTreeOrder(4, 10);

        // The replica gets its own file; Fastest storage performance on the copy.
        var readcopy = options.Clone();
        readcopy.FileName = tempCopy.TempPath;
        readcopy.StoragePerformance = StoragePerformance.Fastest;

        using (var tree = new BPlusTree<int, string>(options))
        using (var copy = new BPlusTree<int, string>(readcopy))
        using (var tlog = new TransactionLog<int, string>(
            new TransactionLogOptions<int, string>(logfile.TempPath, PrimitiveSerializer.Int32, PrimitiveSerializer.String) { ReadOnly = true }))
        {
            tree.Add(0, "0");
            tree.Commit();
            long logpos = 0; // replay position within the transaction log
            copy.EnableCount();

            //start by copying the data from tree's file into the copy instance:
            copy.BulkInsert(
                BPlusTree<int, string>.EnumerateFile(options),
                new BulkInsertOptions { InputIsSorted = true, CommitOnCompletion = false, ReplaceContents = true }
            );
            Assert.AreEqual(1, copy.Count);
            Assert.AreEqual("0", copy[0]);

            // Nothing new in the log yet; replay is a no-op.
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1, copy.Count);

            //insert some data...
            tree.AddRange(MakeValues(1, 99));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(100, copy.Count);

            //remove all the data and replay the deletions...
            for (int i = 0; i < 100; i++)
                tree.Remove(i);
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(0, copy.Count);

            // A larger batch, replayed in one pass.
            tree.AddRange(MakeValues(1000, 1000));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1000, copy.Count);
        }
    }
}
/// <summary>Continuously adds 100-pair batches of generated data to the tree until mreStop is signaled.</summary>
void AddStuff(BPlusTree<Guid, TestInfo> tree)
{
    // Poll the stop event with a zero timeout; loop until it fires.
    while (!mreStop.WaitOne(0, false))
    {
        var batch = CreateData(100);
        foreach (var pair in batch)
        {
            tree.Add(pair.Key, pair.Value);
        }
        AddIdle(tree);
    }
}