// Removes the first 'count' keys (in dictionary enumeration order) from the
// shadow dictionary and, when provided, from the tree, tracing elapsed time.
// 'data' may be null when only the shadow dictionary should be pruned.
void RemoveRandomKeys(int count, Dictionary<int, string> keys, BPlusTree<int, string> data)
{
    Stopwatch time = new Stopwatch();
    time.Start();
    // Snapshot the first 'count' keys so we do not mutate 'keys' while
    // enumerating it.
    int ix = 0;
    int[] del = new int[count];
    foreach (int k in keys.Keys)
    {
        del[ix++] = k;
        if (ix == del.Length)
        {
            break;
        }
    }
    foreach (int k in del)
    {
        keys.Remove(k);
    }
    if (data != null)
    {
        for (int i = 0; i < count; i++)
        {
            data.Remove(del[i]);
        }
        // BUGFIX: removed the stray duplicate data.Remove(del[0]) that
        // followed this loop - del[0] was already removed on the first
        // iteration, so the extra call was a redundant no-op.
    }
    Trace.TraceInformation("Removed {0} in {1}", count, time.ElapsedMilliseconds);
}
// Renames a content entry: the record keyed by 'source' is re-pointed at
// 'dest' and re-indexed under the new key, then the old key is dropped.
// Throws ArgumentException when 'source' is missing or 'dest' already exists.
public void Rename(string source, string dest)
{
    ContentRecord found;
    Check.Assert<ArgumentException>(_index.TryGetValue(source, out found), "The source was not found.");

    ContentRecord updated = found.ToBuilder().SetContentUri(dest).Build();
    Check.Assert<ArgumentException>(_index.Add(dest, updated), "The target already exists.");

    _index.Remove(source);
}
// Stress test: builds a tree of random keys, removes and re-adds a random
// third of them while validating against the shadow dictionary, then drains
// the tree checking that each key survives a remove/add/remove round-trip.
void TestRandomAddRemove(int repeat, int nodesz, int size)
{
    List<int> keysAdded = new List<int>(250000);
    BPlusTreeOptions<int, string> options = Options;
    options.LockingFactory = new IgnoreLockFactory(); // single-threaded test, skip lock overhead
    Dictionary<int, string> keys = new Dictionary<int, string>();
    for (; repeat > 0; repeat--)
    {
        keys.Clear();
        options.BTreeOrder = nodesz;
        using (BPlusTree<int, string> data = Create(options))
        {
            data.EnableCount();
            AddRandomKeys(size, keys, data);
            IsSameList(keys, data);
            keysAdded.Clear();
            // Remove and re-add a random third of the keys, validating the
            // tree structure between each phase.
            for (int tc = 0; tc < 1; tc++)
            {
                int del = keys.Count / 3 + Random.Next(keys.Count / 3);
                RemoveRandomKeys(del, keys, data);
                IsSameList(keys, data);
                data.Validate();
                AddRandomKeys(del, keys, data);
                IsSameList(keys, data);
                data.Validate();
            }
            keysAdded.Clear();
            // Snapshot the surviving keys, then drain: each key must be
            // removable, re-addable, removable again, and then truly absent.
            foreach (KeyValuePair<int, string> kv in data)
            {
                keysAdded.Add(kv.Key);
            }
            foreach (int k in keysAdded)
            {
                Assert.IsTrue(data.Remove(k));
                data.Add(k, k.ToString());
                Assert.IsTrue(data.Remove(k));
                string test;
                Assert.IsFalse(data.TryGetValue(k, out test));
                Assert.IsNull(test);
            }
        }
    }
}
// Verifies that an open enumerator keeps working while the tree is mutated:
// removed keys are skipped and keys added ahead of the cursor are picked up.
public void TestEnumeration()
{
    var options = Options;
    options.BTreeOrder = 4;
    using (BPlusTree<int, string> data = new BPlusTree<int, string>(options))
    {
        data.EnableCount();
        data.DebugSetOutput(new StringWriter());
        data.DebugSetValidateOnCheckpoint(true);
        // Seed keys 0..9.
        for (int id = 0; id < 10; id++)
        {
            data.Add(id, id.ToString());
        }
        using (IEnumerator<KeyValuePair<int, string>> enu = data.GetEnumerator())
        {
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(0, enu.Current.Key);
            // Mutate while the cursor sits on key 0: drop 2..9, add 6..10.
            for (int id = 2; id < 10; id++)
            {
                Assert.IsTrue(data.Remove(id));
            }
            for (int id = 6; id < 11; id++)
            {
                data.Add(id, id.ToString());
            }
            // The cursor now sees 1, then the re-added 6 and 7.
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(1, enu.Current.Key);
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(6, enu.Current.Key);
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(7, enu.Current.Key);
            // Remove everything ahead of the cursor, then add key 11 - the
            // enumerator must skip straight to it and then finish.
            Assert.IsTrue(data.Remove(8));
            Assert.IsTrue(data.Remove(9));
            Assert.IsTrue(data.Remove(10));
            data.Add(11, 11.ToString());
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(11, enu.Current.Key);
            Assert.IsTrue(false == enu.MoveNext());
        }
        data.Clear();
    }
}
// Opens (or creates) the on-disk project-vulnerabilities cache at 'file',
// prunes entries older than ProjectVulnerabilitiesCacheTTL that belong to a
// known artifact project, records the pruned keys, and returns the tree.
private BPlusTree<string, Tuple<OSSIndexProject, IEnumerable<OSSIndexProjectVulnerability>>> InitialiseProjectVulnerabilitiesCache(string file)
{
    lock (project_vulnerabilities_cache_lock)
    {
        // Durable string-keyed cache with BSON-serialized tuple values.
        BPlusTree<string, Tuple<OSSIndexProject, IEnumerable<OSSIndexProjectVulnerability>>>.OptionsV2 cache_file_options =
            new BPlusTree<string, Tuple<OSSIndexProject, IEnumerable<OSSIndexProjectVulnerability>>>.OptionsV2(
                PrimitiveSerializer.String,
                new BsonSerializer<Tuple<OSSIndexProject, IEnumerable<OSSIndexProjectVulnerability>>>());
        cache_file_options.CalcBTreeOrder(4, 128);
        cache_file_options.CreateFile = CreatePolicy.IfNeeded;
        cache_file_options.FileName = file;
        cache_file_options.StoragePerformance = StoragePerformance.CommitToDisk;
        var c = new BPlusTree<string, Tuple<OSSIndexProject, IEnumerable<OSSIndexProjectVulnerability>>>(cache_file_options);
        c.EnableCount();
        // BUGFIX: materialize the expired-key query before removing anything.
        // The original kept a deferred IEnumerable that (a) enumerated c.Keys
        // while entries were being removed from c, and (b) would re-run the
        // query against the already-pruned cache for any later consumer of
        // ProjectVulnerabilitiesExpiredCacheKeys.
        List<string> expired_cache_keys =
            (from cache_key in c.Keys
             where DateTime.UtcNow.Subtract(GetProjectVulnerabilitiesCacheEntry(cache_key).Item2) >= this.ProjectVulnerabilitiesCacheTTL
             join artifact in ArtifactsWithProjects
                 on GetProjectVulnerabilitiesCacheEntry(cache_key).Item1 equals artifact.ProjectId
             select cache_key).ToList();
        this.ProjectVulnerabilitiesExpiredCacheKeys = expired_cache_keys;
        foreach (string k in expired_cache_keys)
        {
            if (!c.Remove(k))
            {
                throw new Exception("Error removing expired cache item with key: " + k + ".");
            }
        }
        return c;
    }
}
// Fills a tree with capacity^3 sequential keys (forcing multiple node
// levels), removes them all, and verifies emptiness and consistency.
public void Remove(int capacity)
{
    var storage = new BPlusTreeNodeMemoryStorage<ComparableKeyOf<Int32>>(capacity);
    var tree = new BPlusTree<ComparableKeyOf<Int32>, ValueOf<Int32>>(storage, new MemoryValueStorage<ValueOf<Int32>>());

    int total = capacity * capacity * capacity;
    for (int key = 0; key < total; key++)
    {
        tree.Set(key, key);
    }
    for (int key = 0; key < total; key++)
    {
        tree.Remove(key);
    }

    Assert.AreEqual(0, tree.Count());
    // Every lookup must now miss.
    for (int key = 0; key < total; key++)
    {
        Assert.IsNull(tree.Get(key));
    }

    string message;
    Assert.IsTrue(tree.CheckConsistency(out message));
}
// Verifies a tree opened with ReadOnly = true serves reads but rejects
// all mutations with InvalidOperationException.
public void TestReadOnly()
{
    using (TempFile file = new TempFile())
    {
        var opt = new BPlusTree<int, int>.Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
        {
            CreateFile = CreatePolicy.Always,
            FileName = file.TempPath,
        };
        // Populate the file with a writable instance first.
        using (BPlusTree<int, int> tree = new BPlusTree<int, int>(opt))
        {
            tree.Add(1, 2);
            tree.Add(3, 4);
            tree.Add(5, 6);
        }
        opt.CreateFile = CreatePolicy.Never;
        opt.ReadOnly = true;
        using (BPlusTree<int, int> tree = new BPlusTree<int, int>(opt))
        {
            // Reads succeed...
            Assert.AreEqual(tree[1], 2);
            Assert.AreEqual(tree[3], 4);
            Assert.AreEqual(tree[5], 6);
            // ...but both indexer writes and removals must throw.
            try
            {
                tree[1] = 0;
                Assert.Fail();
            }
            catch (InvalidOperationException)
            {
            }
            try
            {
                tree.Remove(1);
                Assert.Fail();
            }
            catch (InvalidOperationException)
            {
            }
        }
    }
}
// A tree opened read-only must serve lookups but reject every mutation
// with InvalidOperationException.
public void TestReadOnly()
{
    using (TempFile dataFile = new TempFile())
    {
        var options = new BPlusTree<int, int>.Options(PrimitiveSerializer.Int32, PrimitiveSerializer.Int32)
        {
            CreateFile = CreatePolicy.Always,
            FileName = dataFile.TempPath,
        };

        // Seed the file with a writable instance.
        using (BPlusTree<int, int> writer = new BPlusTree<int, int>(options))
        {
            writer.Add(1, 2);
            writer.Add(3, 4);
            writer.Add(5, 6);
        }

        options.CreateFile = CreatePolicy.Never;
        options.ReadOnly = true;
        using (BPlusTree<int, int> reader = new BPlusTree<int, int>(options))
        {
            Assert.AreEqual(reader[1], 2);
            Assert.AreEqual(reader[3], 4);
            Assert.AreEqual(reader[5], 6);

            // Indexer writes throw...
            try
            {
                reader[1] = 0;
                Assert.Fail();
            }
            catch (InvalidOperationException)
            {
            }
            // ...and so does Remove.
            try
            {
                reader.Remove(1);
                Assert.Fail();
            }
            catch (InvalidOperationException)
            {
            }
        }
    }
}
// Removes all cache entries older than ProjectVulnerabilitiesCacheTTL that
// belong to a known artifact project, and returns how many were deleted.
// Throws when the cache is disabled or a removal fails.
private int DeleteProjectVulnerabilitiesExpired()
{
    if (!ProjectVulnerabilitiesCacheEnabled)
    {
        throw new Exception("Project vulnerabilities cache is not enabled.");
    }
    // BUGFIX: materialize the query before mutating the cache.  The original
    // stored a deferred IEnumerable, enumerated it while removing the very
    // keys it selects, and then called Count() on it afterwards - which
    // re-ran the query against the pruned cache and reported 0 instead of
    // the number of entries actually deleted.
    List<string> expired_cache_keys =
        (from cache_key in ProjectVulnerabilitiesCache.Keys
         where DateTime.UtcNow.Subtract(GetProjectVulnerabilitiesCacheEntry(cache_key).Item2) >= this.ProjectVulnerabilitiesCacheTTL
         join artifact in ArtifactProjects
             on GetProjectVulnerabilitiesCacheEntry(cache_key).Item1 equals artifact.ProjectId
         select cache_key).ToList();
    this.ProjectVulnerabilitiesExpiredCacheKeys = expired_cache_keys;
    foreach (string k in expired_cache_keys)
    {
        if (!ProjectVulnerabilitiesCache.Remove(k))
        {
            throw new Exception("Error removing expired cache item with key: " + k + ".");
        }
    }
    return expired_cache_keys.Count;
}
// Bulk-merges 'nsets' randomly generated key sets into one tree with
// last-value-wins duplicate handling, verifies the tree against the
// aggregate dictionary, and returns that dictionary.
public Dictionary<int, string> TestMergeRandom(BPlusTreeOptions<int, string> options, int nsets, int nsize)
{
    Dictionary<int, string> expected = new Dictionary<int, string>();
    var sets = new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(nsets, nsize, expected)).ToArray();

    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(options))
    {
        foreach (var set in sets)
        {
            tree.BulkInsert(set, new BulkInsertOptions { DuplicateHandling = DuplicateHandling.LastValueWins });
        }

        VerifyDictionary(expected, tree);
        tree.UnloadCache();

        // A plain add/remove after the bulk insert must leave the data intact.
        tree.Add(int.MaxValue, "max");
        tree.Remove(int.MaxValue);
        VerifyDictionary(expected, tree);
    }
    return expected;
}
// Background deleter for the concurrency test: until the stop event is
// signaled, trims the tree back toward 1000 entries by removing keys in
// key order; otherwise yields briefly.
void DeleteStuff(BPlusTree<Guid, TestInfo> tree)
{
    while (!mreStop.WaitOne(0, false))
    {
        if (tree.Count > 1000)
        {
            int limit = tree.Count - 1000;
            // NOTE(review): removes entries while enumerating tree.Keys;
            // BPlusTree enumerators appear to tolerate concurrent
            // modification (see the enumeration tests in this suite) -
            // confirm before reusing this pattern elsewhere.
            foreach (Guid key in tree.Keys)
            {
                if (!tree.Remove(key))
                {
                    throw new ApplicationException();
                }
                if (--limit <= 0)
                {
                    break;
                }
            }
        }
        else
        {
            Thread.Sleep(1);
        }
    }
}
// Rename step: moves a child entry from the old name's hash bucket to the
// new name's bucket inside the children B+ tree, creating or rewriting the
// serialized ChildTag blocks as needed.
public void Execute(uint stage, SharpMedia.Database.Physical.Journalling.IService service)
{
    // 1) We read previous object placement and change it.
    ObjectInfo info = childrenTree.Find(service, (uint)prevName.GetHashCode());
    BlockStream stream = BlockStream.FromBase(info.Address, service);
    ChildTag childTag = Common.DeserializeFromArray(stream.Read(info.Size)) as ChildTag;
    childTag.Remove(prevName);

    // Remove it if empty.
    if (childTag.IsEmpty)
    {
        childrenTree.Remove(service, (uint)prevName.GetHashCode(), 1, false);
    }
    else
    {
        // Update the entry (now without this child).
        byte[] childTagData = Common.SerializeToArray(childTag);
        stream = service.AllocationContext.CreateBlockStream((ulong)childTagData.LongLength);
        stream.Write(childTagData);
        childrenTree.Replace(service, new ObjectInfo((uint)prevName.GetHashCode(), (ulong)childTagData.LongLength, stream.BaseAddress));
    }

    // 3) We create new and insert it into tree.
    ObjectInfo info2 = childrenTree.Find(service, (uint)newName.GetHashCode());
    if (info2 == null)
    {
        // We create new tag.
        childTag = new ChildTag();
        childTag.Add(newName, info.Address);
        byte[] childTagData = Common.SerializeToArray(childTag);
        stream = service.AllocationContext.CreateBlockStream((ulong)childTagData.LongLength);
        stream.Write(childTagData);

        // And we add child.
        childrenTree.Add(service, new ObjectInfo((uint)newName.GetHashCode(), (ulong)childTagData.LongLength, stream.BaseAddress));
    }
    else
    {
        // We append it and release previous tag.
        stream = BlockStream.FromBase(info2.Address, service);
        childTag = Common.DeserializeFromArray(stream.Read(info2.Size)) as ChildTag;
        stream.Deallocate();

        // We modify and rewrite it.
        childTag.Add(newName, info.Address);
        byte[] childTagData = Common.SerializeToArray(childTag);
        stream = service.AllocationContext.CreateBlockStream((ulong)childTagData.LongLength);
        stream.Write(childTagData);

        // We insert into children tree.
        // NOTE(review): this Replace uses info.Address (the renamed object's
        // address) rather than stream.BaseAddress of the freshly written tag
        // block, unlike the analogous branches above - verify intentional.
        childrenTree.Replace(service, new ObjectInfo((uint)newName.GetHashCode(), (ulong)childTagData.LongLength, info.Address));
    }
}
// Deletes the author/document link row whose key is the numeric a_d_id.
public void Delete(string a_d_id)
{
    options.CalcBTreeOrder(4, 4);
    options.CreateFile = CreatePolicy.Never;
    // NOTE(review): Path.GetFileName strips the directory, so the tree is
    // opened as the relative file "TreeAuthor_Document" in the current
    // working directory, not at the absolute path - confirm intended.
    options.FileName = Path.GetFileName("C:/Users/Дмитрий/Desktop/DMDProject/DMDProject/TreeAuthor_Document");
    using (var tree = new BPlusTree<int, int>(options))
    {
        // Convert.ToInt32 throws FormatException on non-numeric input.
        tree.Remove(Convert.ToInt32(a_d_id));
    }
}
// A read-only copy opened against the same file only sees data the writer
// has Commit()ed; uncommitted inserts/removals are invisible to new
// read-only instances.
public void TestReadOnlyCopy()
{
    using (var tempFile = new TempFile())
    {
        var options = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
        }
        .CalcBTreeOrder(4, 10);
        var readcopy = options.Clone();
        readcopy.CreateFile = CreatePolicy.Never;
        readcopy.ReadOnly = true;
        using (var tree = new BPlusTree<int, string>(options))
        {
            // Nothing committed yet: the copy is empty.
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }
            //insert some data...
            tree.AddRange(MakeValues(0, 100));
            // Still uncommitted, so still invisible to a fresh copy.
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }
            tree.Commit();
            //insert some data...
            for (int i = 0; i < 100; i++)
            {
                tree.Remove(i);
            }
            tree.AddRange(MakeValues(1000, 1000));
            // Only the first committed 100 records are visible; the removals
            // and the 1000-range adds above have not been committed yet.
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(100, copy.Count);
                Assert.AreEqual(0, copy.First().Key);
                Assert.AreEqual(99, copy.Last().Key);
            }
            tree.Commit();
        }
    }
}
// Logically deletes the record for 'key': writes a tombstone marker byte at
// the record's offset in the memory-mapped block, then removes and commits
// the index entry.
public override void Delete(byte[] key)
{
    lock (index)
    {
        long offset = index[key];
        // 'using' guarantees the view accessor is released even if
        // Write/Flush throws (the original leaked it on exception).
        using (MemoryMappedViewAccessor accessor = mmfBlock.CreateViewAccessor(reservedBytes + offset, 1, MemoryMappedFileAccess.ReadWrite))
        {
            accessor.Write(0, true);
            accessor.Flush();
        }
        index.Remove(key);
        index.Commit();
    }
}
// Count reads int.MinValue until EnableCount() runs; afterwards it must
// track every successful add and remove exactly.
public void TestCounts()
{
    using (BPlusTree<int, string> data = Create(Options))
    {
        Assert.AreEqual(int.MinValue, data.Count);
        data.EnableCount();
        Assert.AreEqual(0, data.Count);

        Assert.IsTrue(data.TryAdd(1, "test"));
        Assert.AreEqual(1, data.Count);
        Assert.IsTrue(data.TryAdd(2, "test"));
        Assert.AreEqual(2, data.Count);

        // Duplicate key: TryAdd fails and the count is unchanged.
        Assert.IsFalse(data.TryAdd(2, "test"));
        Assert.AreEqual(2, data.Count);

        Assert.IsTrue(data.Remove(1));
        Assert.AreEqual(1, data.Count);
        Assert.IsTrue(data.Remove(2));
        Assert.AreEqual(0, data.Count);
    }
}
// First()/Last() must track the smallest/largest key as entries are removed,
// and both must throw InvalidOperationException once the tree is empty.
public void TestFirstAndLast()
{
    using (BPlusTree<int, string> data = Create(Options))
    {
        string[] values = { "a", "b", "c", "d", "e" };
        for (int k = 0; k < values.Length; k++)
        {
            data.Add(k + 1, values[k]);
        }

        Assert.AreEqual(1, data.First().Key);
        Assert.AreEqual("a", data.First().Value);
        data.Remove(1);
        Assert.AreEqual(2, data.First().Key);
        Assert.AreEqual("b", data.First().Value);

        Assert.AreEqual(5, data.Last().Key);
        Assert.AreEqual("e", data.Last().Value);
        data.Remove(5);
        Assert.AreEqual(4, data.Last().Key);
        Assert.AreEqual("d", data.Last().Value);

        data.Remove(4);
        data.Remove(3);
        KeyValuePair<int, string> kv;
        Assert.IsTrue(data.TryGetLast(out kv));
        Assert.IsTrue(data.TryGetFirst(out kv));

        data.Remove(2);
        Assert.IsFalse(data.TryGetLast(out kv));
        Assert.IsFalse(data.TryGetFirst(out kv));

        try
        {
            data.First();
            Assert.Fail("Should raise InvalidOperationException");
        }
        catch (InvalidOperationException)
        {
        }
        try
        {
            data.Last();
            Assert.Fail("Should raise InvalidOperationException");
        }
        catch (InvalidOperationException)
        {
        }
    }
}
// Read-only snapshots of the tree file must reflect only committed data.
public void TestReadOnlyCopy()
{
    using (var dataFile = new TempFile())
    {
        var writerOptions = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = dataFile.TempPath,
        }.CalcBTreeOrder(4, 10);

        var readerOptions = writerOptions.Clone();
        readerOptions.CreateFile = CreatePolicy.Never;
        readerOptions.ReadOnly = true;

        using (var writer = new BPlusTree<int, string>(writerOptions))
        {
            // Nothing committed yet: a read-only copy is empty.
            using (var reader = new BPlusTree<int, string>(readerOptions))
            {
                reader.EnableCount();
                Assert.AreEqual(0, reader.Count);
            }

            // Uncommitted inserts stay invisible to read-only copies.
            writer.AddRange(MakeValues(0, 100));
            using (var reader = new BPlusTree<int, string>(readerOptions))
            {
                reader.EnableCount();
                Assert.AreEqual(0, reader.Count);
            }
            writer.Commit();

            // Changes made after the commit...
            for (int i = 0; i < 100; i++)
            {
                writer.Remove(i);
            }
            writer.AddRange(MakeValues(1000, 1000));

            // ...are likewise invisible until the next Commit().
            using (var reader = new BPlusTree<int, string>(readerOptions))
            {
                reader.EnableCount();
                Assert.AreEqual(100, reader.Count);
                Assert.AreEqual(0, reader.First().Key);
                Assert.AreEqual(99, reader.Last().Key);
            }
            writer.Commit();
        }
    }
}
// Deletes 'count' objects starting at 'index': first frees each object's
// backing block stream, then drops their entries from the B+ tree.
public void Execute(uint stage, IService service)
{
    // 1) We first delete all objects.
    List<ObjectInfo> all = tree.List(service, index, count);
    foreach (ObjectInfo info in all)
    {
        BlockStream stream = BlockStream.FromBase(info.Address, service);
        stream.Deallocate();
    }

    // 2) We delete them from B+ tree.
    tree.Remove(service, index, count, decreaseCount);
}
// Exactly 'capacity' keys, so everything happens inside one node:
// set, get, overwrite, and remove without ever triggering a split.
private void SingleNodeOperations(int capacity)
{
    var storage = new BPlusTreeNodeMemoryStorage<ComparableKeyOf<Int32>>(capacity);
    var tree = new BPlusTree<ComparableKeyOf<Int32>, ValueOf<Int32>>(storage, new MemoryValueStorage<ValueOf<Int32>>());

    // place sequential numbers
    for (int key = 0; key < capacity; key++)
    {
        tree.Set(key, key);
    }
    // check if we can retrieve correct values
    for (int key = 0; key < capacity; key++)
    {
        Assert.AreEqual(key, tree.Get(key).Value);
    }
    Assert.AreEqual(capacity, tree.Count());

    // overwrite each key with the mirrored value
    for (int key = 0; key < capacity; key++)
    {
        tree.Set(key, capacity - 1 - key);
    }
    for (int key = 0; key < capacity; key++)
    {
        Assert.AreEqual(capacity - 1 - key, tree.Get(key).Value);
    }

    // delete all keys
    for (int key = 0; key < capacity; key++)
    {
        tree.Remove(key);
    }
    Assert.AreEqual(0, tree.Count());

    // every lookup must now miss
    for (int key = 0; key < capacity; key++)
    {
        Assert.IsNull(tree.Get(key));
    }
}
/// <summary>
///A test for Remove
///</summary>
public void RemoveTestHelper<T>()
{
    // NOTE(review): the type parameter T is never used inside this helper;
    // the tree under test is always BPlusTree<double>.
    int degree = 2;
    BPlusTree<double> target = new BPlusTree<double>(degree);
    // NOTE(review): 6 is never added below, so Remove targets a key that is
    // not present - confirm the test intends the missing-key path.
    double data = 6;
    target.Add(1);
    target.Add(2);
    target.Add(3);
    target.Add(4);
    target.Add(5);
    target.Remove(data);
    // Contains on the absent key must still produce a search path.
    SearchResult actual = target.Contains(data);
    Assert.IsNotNull(actual.SearchPath);
}
// Crash-recovery test: writers are aborted mid-stream, then RecoverFile must
// rebuild the tree with at least the records known to have been committed,
// and the recovered file must round-trip all of them.
public void TestAbortWritersAndRecover()
{
    BPlusTree<KeyInfo, DataValue>.Options options = new BPlusTree<KeyInfo, DataValue>.Options(
        new KeyInfoSerializer(), new DataValueSerializer(), new KeyInfoComparer());
    options.CalcBTreeOrder(32, 300);
    options.FileName = TempFile.TempPath;
    options.CreateFile = CreatePolicy.Always;

    using (TempFile copy = new TempFile())
    {
        copy.Delete();
        // Runs the writers, aborts them, and returns the minimum number of
        // records guaranteed to have made it to disk.
        int minRecordCreated = StartAndAbortWriters(options, copy);

        using (TempFile.Attach(copy.TempPath + ".recovered")) //used to create the new copy
        using (TempFile.Attach(copy.TempPath + ".deleted")) //renamed existing file
        {
            options.CreateFile = CreatePolicy.Never;
            // First recover the original (aborted) file in place.
            int recoveredRecords = BPlusTree<KeyInfo, DataValue>.RecoverFile(options);
            if (recoveredRecords < RecordsCreated)
            {
                Assert.Fail("Unable to recover records, recieved ({0} of {1}).", recoveredRecords, RecordsCreated);
            }

            // Then recover the copy taken while the writers were aborted.
            options.FileName = copy.TempPath;
            recoveredRecords = BPlusTree<KeyInfo, DataValue>.RecoverFile(options);
            Assert.IsTrue(recoveredRecords >= minRecordCreated, "Expected at least " + minRecordCreated + " found " + recoveredRecords);

            using (BPlusTree<KeyInfo, DataValue> dictionary = new BPlusTree<KeyInfo, DataValue>(options))
            {
                dictionary.EnableCount();
                Assert.AreEqual(recoveredRecords, dictionary.Count);
                // NOTE(review): removes entries while enumerating the tree;
                // BPlusTree enumerators appear to tolerate this (see the
                // enumeration tests) - confirm before copying the pattern.
                foreach (KeyValuePair<KeyInfo, DataValue> kv in dictionary)
                {
                    Assert.AreEqual(kv.Key.UID, kv.Value.Key.UID);
                    dictionary.Remove(kv.Key);
                }
                Assert.AreEqual(0, dictionary.Count);
            }
        }
    }
}
// A million string-keyed records: insert all, remove a random ~half, then
// verify every lookup against the shadow dictionaries.
public void TestMillionRecords()
{
    int total = 1000000;
    var rng = new Random();
    var source = new Dictionary<int, int>();
    for (int i = 0; i < total; i++)
    {
        source[i] = rng.Next(1000000);
    }

    var storage = new BPlusTreeNodeMemoryStorage<ComparableKeyOf<String>>(250);
    var tree = new BPlusTree<ComparableKeyOf<String>, ValueOf<String>>(storage, new MemoryValueStorage<ValueOf<String>>());
    foreach (var entry in source)
    {
        tree.Set(entry.Key.ToString(CultureInfo.InvariantCulture), entry.Value.ToString(CultureInfo.InvariantCulture));
    }

    // Randomly delete about half the records, remembering which.
    var removed = new Dictionary<int, int>();
    foreach (var entry in source)
    {
        if (rng.NextDouble() > 0.5)
        {
            removed.Add(entry.Key, entry.Value);
            tree.Remove(entry.Key.ToString(CultureInfo.InvariantCulture));
        }
    }

    // Deleted keys must miss; surviving keys must return their value.
    foreach (var entry in source)
    {
        string found = tree.Get(entry.Key.ToString(CultureInfo.InvariantCulture));
        if (removed.ContainsKey(entry.Key))
        {
            Assert.IsNull(found);
        }
        else
        {
            Assert.AreEqual(entry.Value.ToString(CultureInfo.InvariantCulture), found);
        }
    }
}
// Random toggle test: each key alternates between present and absent;
// 'present[k]' tracks the expected state of key k after every operation.
public void TestRandomOperations()
{
    int capacity = 10;
    var storage = new BPlusTreeNodeMemoryStorage<ComparableKeyOf<Int32>>(capacity);
    var tree = new BPlusTree<ComparableKeyOf<Int32>, ValueOf<Int32>>(storage, new MemoryValueStorage<ValueOf<Int32>>());

    int keyCount = capacity * capacity * capacity;
    int operationCount = keyCount * keyCount;
    var present = new bool[keyCount];
    var rng = new Random();

    for (int op = 0; op < operationCount; op++)
    {
        int key = rng.Next(keyCount);
        if (present[key])
        {
            tree.Remove(key);
        }
        else
        {
            tree.Set(key, key);
        }
        present[key] = !present[key];
    }

    // The tree must agree with the expected presence of every key.
    for (int key = 0; key < keyCount; key++)
    {
        var value = tree.Get(key);
        if (present[key])
        {
            Assert.AreEqual(key, value.Value);
        }
        else
        {
            Assert.IsNull(value);
        }
    }
    Assert.AreEqual(present.Count(f => f), tree.Count());

    string message;
    Assert.IsTrue(tree.CheckConsistency(out message));
}
// Trims the tree back toward 1000 entries until the stop event fires,
// sleeping briefly whenever there is nothing to delete.
void DeleteStuff(BPlusTree<Guid, TestInfo> tree)
{
    while (!mreStop.WaitOne(0, false))
    {
        if (tree.Count <= 1000)
        {
            Thread.Sleep(1);
            continue;
        }

        int excess = tree.Count - 1000;
        foreach (Guid key in tree.Keys)
        {
            if (!tree.Remove(key))
            {
                throw new ApplicationException();
            }
            if (--excess <= 0)
            {
                break;
            }
        }
    }
}
// Adds a handful of entries (including a duplicate AddOrFind on the same
// key), then checks that removing and re-adding a key restores the tree's
// original contents exactly.
public void TestAddRemoveFew()
{
    var BPTree = new BPlusTree<BlobLocation>(100);
    var testblob1 = new BlobLocation(null, "somewhere1", 0);
    BlobLocation bl2 = new BlobLocation(null, "somewhere2", 4);
    BlobLocation bl3 = new BlobLocation(new byte[][] { new byte[] { 1, 2 }, new byte[] { 1, 0 } }.ToList());
    BlobLocation bl4 = new BlobLocation();
    // Five random 20-byte keys.
    var rng = new Random();
    var testkey1 = new byte[20];
    rng.NextBytes(testkey1);
    byte[] key2 = new byte[20];
    rng.NextBytes(key2);
    byte[] key3 = new byte[20];
    rng.NextBytes(key3);
    byte[] key4 = new byte[20];
    rng.NextBytes(key4);
    byte[] key5 = new byte[20];
    rng.NextBytes(key5);
    BPTree.AddOrFind(testkey1, testblob1);
    BPTree.AddOrFind(key2, bl2);
    BPTree.AddOrFind(key3, bl3);
    BPTree.AddOrFind(key4, bl4);
    BPTree.AddOrFind(key5, bl4);
    // Duplicate: AddOrFind on an existing key.
    BPTree.AddOrFind(testkey1, testblob1);
    List<KeyValuePair<byte[], BlobLocation>> treecontents1 = new List<KeyValuePair<byte[], BlobLocation>>(BPTree);
    BPTree.Remove(testkey1);
    List<KeyValuePair<byte[], BlobLocation>> treecontents2 = new List<KeyValuePair<byte[], BlobLocation>>(BPTree);
    BPTree.AddOrFind(testkey1, testblob1);
    List<KeyValuePair<byte[], BlobLocation>> treecontents3 = new List<KeyValuePair<byte[], BlobLocation>>(BPTree);
    // Removal changes the contents; re-adding must restore them.
    Assert.IsFalse(TreeContentsMatch(treecontents1, treecontents2));
    Assert.IsTrue(TreeContentsMatch(treecontents1, treecontents3));
}
// Inserts two fixed key sequences (chosen to exercise node splits), then
// reads, overwrites, and removes each key while Count is tracked exactly.
public void TestInserts()
{
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();
        int[][] testArrays =
        {
            new int[] { 10, 18, 81, 121, 76, 31, 250, 174, 24, 38, 246, 79 },
            new int[] { 110, 191, 84, 218, 170, 217, 199, 232, 184, 254, 32, 90, 241, 136, 181, 28, 226, 69, 52 },
        };

        foreach (int[] sequence in testArrays)
        {
            data.Clear();
            Assert.AreEqual(0, data.Count);

            int expected = 0;
            foreach (int id in sequence)
            {
                data.Add(id, id.ToString());
                Assert.AreEqual(++expected, data.Count);
            }
            Assert.AreEqual(sequence.Length, data.Count);

            // Force subsequent reads to hit storage instead of cached nodes.
            data.UnloadCache();

            foreach (int id in sequence)
            {
                Assert.AreEqual(id.ToString(), data[id]);
                data[id] = String.Empty;
                Assert.AreEqual(String.Empty, data[id]);

                Assert.IsTrue(data.Remove(id));
                Assert.AreEqual(--expected, data.Count);
            }
            Assert.AreEqual(0, data.Count);
        }
    }
}
// Demo: index every file under %TEMP% as full path -> last UTC write time,
// then reopen the tree and report new, modified, and deleted files by
// diffing against the current directory contents.
public void BPlusTreeDemo()
{
    var options = new BPlusTree<string, DateTime>.OptionsV2(PrimitiveSerializer.String, PrimitiveSerializer.DateTime);
    options.CalcBTreeOrder(16, 24);
    options.CreateFile = CreatePolicy.Always;
    options.FileName = Path.GetTempFileName();
    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        var tempDir = new DirectoryInfo(Path.GetTempPath());
        foreach (var file in tempDir.GetFiles("*", SearchOption.AllDirectories))
        {
            tree.Add(file.FullName, file.LastWriteTimeUtc);
        }
    }
    options.CreateFile = CreatePolicy.Never;
    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        var tempDir = new DirectoryInfo(Path.GetTempPath());
        foreach (var file in tempDir.GetFiles("*", SearchOption.AllDirectories))
        {
            DateTime cmpDate;
            if (!tree.TryGetValue(file.FullName, out cmpDate))
            {
                Console.WriteLine("New file: {0}", file.FullName);
            }
            else if (cmpDate != file.LastWriteTimeUtc)
            {
                Console.WriteLine("Modified: {0}", file.FullName);
            }
            // Drop each file we have seen; leftovers were deleted on disk.
            tree.Remove(file.FullName);
        }
        foreach (var item in tree)
        {
            Console.WriteLine("Removed: {0}", item.Key);
        }
    }
}
// Inserts 107 random 20-byte keys, then deletes them in random order,
// validating the tree against the shadow list after every removal.
public void TestAddRemoveMany()
{
    var tree = new BPlusTree<byte[]>(100);
    Random rng = new Random(80); // fixed seed keeps the test deterministic
    List<byte[]> shadow = new List<byte[]>();

    for (int i = 0; i < 107; i++)
    {
        byte[] key = new byte[20];
        rng.NextBytes(key);
        tree.Add(key, key);
        shadow.Add(key);
    }
    ValidateTree(tree, shadow);

    for (int i = 0; i < 107; i++)
    {
        int victim = rng.Next(shadow.Count);
        tree.Remove(shadow[victim]);
        shadow.RemoveAt(victim);
        ValidateTree(tree, shadow);
    }
}
// Five rounds: add a random subset of [0, 255), then remove every added key
// and confirm the tree empties out completely.
public void RandomSequenceTest()
{
    int rounds = 5;
    int limit = 255;
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();
        List<int> numbers = new List<int>();
        while (rounds-- > 0)
        {
            data.Clear();
            numbers.Clear();
            data.DebugSetValidateOnCheckpoint(true);

            for (int i = 0; i < limit; i++)
            {
                int id = Random.Next(limit);
                if (numbers.Contains(id))
                {
                    continue; // skip duplicates
                }
                numbers.Add(id);
                data.Add(id, "V" + id);
            }
            Assert.AreEqual(numbers.Count, data.Count);

            foreach (int number in numbers)
            {
                Assert.IsTrue(data.Remove(number));
            }
            Assert.AreEqual(0, data.Count);
        }
    }
}
// Keeps a second tree in sync with the primary purely by replaying the
// primary's transaction log: the copy is seeded from the data file, then
// ReplayLog(copy, ref logpos) is applied after each committed batch.
public void TestSyncFromLogging()
{
    using (var tempFile = new TempFile())
    using (var logfile = new TempFile())
    using (var tempCopy = new TempFile())
    {
        var options = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
            TransactionLogFileName = logfile.TempPath,
        }.CalcBTreeOrder(4, 10);

        var readcopy = options.Clone();
        readcopy.FileName = tempCopy.TempPath;
        readcopy.StoragePerformance = StoragePerformance.Fastest;

        using (var tree = new BPlusTree<int, string>(options))
        using (var copy = new BPlusTree<int, string>(readcopy))
        using (var tlog = new TransactionLog<int, string>(
            new TransactionLogOptions<int, string>(logfile.TempPath, PrimitiveSerializer.Int32, PrimitiveSerializer.String) { ReadOnly = true }))
        {
            tree.Add(0, "0");
            tree.Commit();
            long logpos = 0;
            copy.EnableCount();
            //start by copying the data from tree's file into the copy instance:
            copy.BulkInsert(
                BPlusTree<int, string>.EnumerateFile(options),
                new BulkInsertOptions { InputIsSorted = true, CommitOnCompletion = false, ReplaceContents = true }
                );
            Assert.AreEqual(1, copy.Count);
            Assert.AreEqual("0", copy[0]);
            // Replaying the log over the seeded copy leaves the count at 1.
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1, copy.Count);
            //insert some data...
            tree.AddRange(MakeValues(1, 99));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(100, copy.Count);
            //insert some data...
            for (int i = 0; i < 100; i++)
                tree.Remove(i);
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(0, copy.Count);

            tree.AddRange(MakeValues(1000, 1000));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1000, copy.Count);
        }
    }
}
/// <summary>
/// Executes the operation: deallocates the node's common header block, its
/// children B+ tree, and every version (including all typed streams), then
/// removes or rewrites the node's entry in the parent's child tree.
/// </summary>
/// <param name="service">Service for operation.</param>
unsafe void ExecuteInternal(IService service)
{
    ulong childrenTS;
    ulong versionTS;

    // 1) We extract child TS and version TS
    Block block = service.Read(BlockType.NodeHeaderBlock, commonAddress);
    fixed (byte* p = block.Data)
    {
        NodeCommonHeader* header = (NodeCommonHeader*)p;
        childrenTS = header->ChildrenBTree;
        versionTS = header->VersionsBTree;
    }

    // 2) Can get rid of node.
    service.DeAllocate(commonAddress);

    // 3) We delete children TS, it should be empty (since deletes were already called on nodes).
    service.DeAllocate(childrenTS);

    // 4) We go through versions.
    BPlusTree versionsTree = new BPlusTree(versionTS);
    List<ObjectInfo> versions = versionsTree.ListAll(service);

    // 5) We delete each version.
    foreach (ObjectInfo info in versions)
    {
        // a) Read version tag.
        BlockStream versionTagStream = BlockStream.FromBase(info.Address, service);
        VersionTag versionTag = Common.DeserializeFromArray(versionTagStream.Read(info.Size)) as VersionTag;
        versionTagStream.Deallocate();

        foreach (KeyValuePair<ulong, ulong> versionToNode in versionTag.VersionAddress)
        {
            block = service.Read(BlockType.NodeHeaderBlock, versionToNode.Value);
            List<ulong> typedStreams = NodeVersionHelper.ListAllTypedStreamsAsAddresses(block, service);

            // b) Delete all typed streams.
            for (int i = 0; i < typedStreams.Count; i++)
            {
                // 1) We delete the typed stream object.
                block = service.Read(BlockType.TypedStreamHeader, typedStreams[i]);
                fixed (byte* p = block.Data)
                {
                    TypedStreamHeader* header = (TypedStreamHeader*)p;

                    // We delete single object.
                    if ((header->Options & StreamOptions.SingleObject) != 0)
                    {
                        // Address 0 means no object was ever written.
                        if (header->ObjectsAddress != 0)
                        {
                            BlockStream stream = BlockStream.FromBase(header->ObjectsAddress, service);
                            stream.Deallocate();
                        }
                    }
                    else
                    {
                        // We delete all children.
                        BPlusTree tree = new BPlusTree(header->ObjectsAddress);
                        foreach (ObjectInfo info2 in tree.ListAll(service))
                        {
                            BlockStream stream = BlockStream.FromBase(info2.Address, service);
                            stream.Deallocate();
                        }
                    }
                }

                // 2) We also delete the header itself.
                service.DeAllocate(typedStreams[i]);
            }

            // c) We deallocate version block.
            service.DeAllocate(versionToNode.Value);
        }
    }

    // 6) We delete the tree.
    versionsTree.DeallocateTree(service);

    // 7) We must erase the node from root.
    ObjectInfo childInfo = parentChildTree.Find(service, (uint)childName.GetHashCode());
    BlockStream childTagStream = BlockStream.FromBase(childInfo.Address, service);
    byte[] childTagData = childTagStream.Read(childInfo.Size);
    ChildTag childTag = Common.DeserializeFromArray(childTagData) as ChildTag;
    childTagStream.Deallocate();

    if (childTag.Children.Count == 1)
    {
        // Simply delete it.
        parentChildTree.Remove(service, (uint)childName.GetHashCode(), 1, false);
    }
    else
    {
        // We have to replace it.
        childTag.Remove(childName);
        childTagData = Common.SerializeToArray(childTag);
        childTagStream = service.AllocationContext.CreateBlockStream((ulong)childTagData.LongLength);
        childTagStream.Write(childTagData);
        parentChildTree.Replace(service, new ObjectInfo((uint)childName.GetHashCode(), (ulong)childTagData.LongLength, childTagStream.BaseAddress));
    }
}
// Mixed add/remove pattern around open enumerators: fills keys 0..16 (out of
// order), mutates through an open enumerator, then removes ranges and
// verifies exact membership at each step.
public void ExplicitRangeAddRemove()
{
    string test;
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.Add(2, "v2");
        data.Add(1, "v1");
        // Fill 0..7 ascending, then 16..8 descending (TryAdd skips existing keys).
        int i = 0;
        for (; i < 8; i++)
        {
            data.TryAdd(i, "v" + i);
        }
        for (i = 16; i >= 8; i--)
        {
            data.TryAdd(i, "v" + i);
        }
        // No-op: 13 was already added by the descending loop (i is 7 here).
        data.TryAdd(13, "v" + i);
        for (i = 0; i <= 16; i++)
        {
            if (!data.TryGetValue(i, out test))
            {
                throw new ApplicationException();
            }
            Assert.AreEqual("v" + i, test);
        }
        // Mutate while an enumerator is open; moves must keep succeeding.
        data.Remove(1);
        data.Remove(3);
        IEnumerator<KeyValuePair<int, string>> e = data.GetEnumerator();
        Assert.IsTrue(e.MoveNext());
        Assert.AreEqual(0, e.Current.Key);
        data.Add(1, "v1");
        Assert.IsTrue(e.MoveNext());
        data.Add(3, "v3");
        Assert.IsTrue(e.MoveNext());
        data.Remove(8);
        Assert.IsTrue(e.MoveNext());
        e.Dispose();
        data.Add(8, "v8");
        // Contents must now be exactly keys 0..16 in order.
        i = 0;
        foreach (KeyValuePair<int, string> pair in data)
        {
            Assert.AreEqual(pair.Key, i++);
        }
        // Remove/re-add every key, then remove the middle range 6..12.
        for (i = 0; i <= 16; i++)
        {
            Assert.IsTrue(data.Remove(i) && data.TryAdd(i, "v" + i));
        }
        for (i = 6; i <= 12; i++)
        {
            Assert.IsTrue(data.Remove(i));
        }
        for (i = 6; i <= 12; i++)
        {
            Assert.IsFalse(data.TryGetValue(i, out test));
            Assert.IsNull(test);
        }
        for (i = 0; i <= 5; i++)
        {
            Assert.IsTrue(data.TryGetValue(i, out test));
            Assert.AreEqual("v" + i, test);
        }
        for (i = 13; i <= 16; i++)
        {
            Assert.IsTrue(data.TryGetValue(i, out test));
            Assert.AreEqual("v" + i, test);
        }
        // Final drain: only keys outside 6..12 remain removable.
        for (i = 0; i <= 16; i++)
        {
            Assert.AreEqual(i < 6 || i > 12, data.Remove(i));
        }
    }
}
// Merges 'nsets' randomly generated key sets into one tree via BulkInsert
// with last-value-wins duplicate handling, verifies the tree against the
// aggregate dictionary, and returns that dictionary.
public Dictionary<int, string> TestMergeRandom(BPlusTreeOptions<int, string> options, int nsets, int nsize)
{
    Dictionary<int, string> test = new Dictionary<int, string>();
    IEnumerable<KeyValuePair<int, string>>[] sets =
        new List<IEnumerable<KeyValuePair<int, string>>>(CreateSets(nsets, nsize, test)).ToArray();

    using (BPlusTree<int, string> tree = new BPlusTree<int, string>(options))
    {
        foreach (IEnumerable<KeyValuePair<int, string>> set in sets)
            tree.BulkInsert(set, new BulkInsertOptions { DuplicateHandling = DuplicateHandling.LastValueWins });

        VerifyDictionary(test, tree);
        tree.UnloadCache();
        // A normal add/remove after the bulk insert must leave the data intact.
        tree.Add(int.MaxValue, "max");
        tree.Remove(int.MaxValue);
        VerifyDictionary(test, tree);
    }
    return test;
}
// Demo: index every file under %TEMP% by full path -> last UTC write time,
// then reopen the tree and diff it against the current directory state,
// reporting new, modified, and deleted files.
public void BPlusTreeDemo()
{
    var options = new BPlusTree<string, DateTime>.OptionsV2(PrimitiveSerializer.String, PrimitiveSerializer.DateTime);
    options.CalcBTreeOrder(16, 24);
    options.CreateFile = CreatePolicy.Always;
    options.FileName = Path.GetTempFileName();

    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        foreach (var file in new DirectoryInfo(Path.GetTempPath()).GetFiles("*", SearchOption.AllDirectories))
        {
            tree.Add(file.FullName, file.LastWriteTimeUtc);
        }
    }

    options.CreateFile = CreatePolicy.Never;
    using (var tree = new BPlusTree<string, DateTime>(options))
    {
        foreach (var file in new DirectoryInfo(Path.GetTempPath()).GetFiles("*", SearchOption.AllDirectories))
        {
            DateTime recorded;
            bool known = tree.TryGetValue(file.FullName, out recorded);
            if (!known)
            {
                Console.WriteLine("New file: {0}", file.FullName);
            }
            else if (recorded != file.LastWriteTimeUtc)
            {
                Console.WriteLine("Modified: {0}", file.FullName);
            }
            tree.Remove(file.FullName);
        }

        // Whatever is left in the tree no longer exists on disk.
        foreach (var item in tree)
        {
            Console.WriteLine("Removed: {0}", item.Key);
        }
    }
}
/// <summary>
/// Deletes the record stored for the specified timestamp (and its article)
/// and commits the change to disk immediately.
/// </summary>
/// <param name="dateTime">Key (exact time) of the record to delete.</param>
public void Remove(DateTime dateTime)
{
    bTree.Remove(dateTime);
    bTree.Commit();
}
// Timed end-to-end pass over a key sequence (start -> stop, step incr):
// insert, seek, update, seek again, enumerate, and delete, tracing the
// elapsed milliseconds of each phase under 'name'.
void SequencedTest(int start, int incr, int stop, string name)
{
    int count = Math.Abs(start - stop)/Math.Abs(incr);
    const string myTestValue1 = "T1", myTestValue2 = "t2";
    string test;

    using (BPlusTree<int, string> data = new BPlusTree<int, string>(Options))
    {
        Stopwatch time = new Stopwatch();
        time.Start();
        //large order-forward
        for (int i = start; i != stop; i += incr)
            if (!data.TryAdd(i, myTestValue1)) throw new ApplicationException();
        Trace.TraceInformation("{0} insert {1} in {2}", name, count, time.ElapsedMilliseconds);

        time.Reset();
        time.Start();
        // Every inserted key must be retrievable with its original value.
        for (int i = start; i != stop; i += incr)
            if (!data.TryGetValue(i, out test) || test != myTestValue1) throw new ApplicationException();
        Trace.TraceInformation("{0} seek {1} in {2}", name, count, time.ElapsedMilliseconds);

        time.Reset();
        time.Start();
        // Overwrite every value in place.
        for (int i = start; i != stop; i += incr)
            if (!data.TryUpdate(i, myTestValue2)) throw new ApplicationException();
        Trace.TraceInformation("{0} modify {1} in {2}", name, count, time.ElapsedMilliseconds);

        time.Reset();
        time.Start();
        for (int i = start; i != stop; i += incr)
            if (!data.TryGetValue(i, out test) || test != myTestValue2) throw new ApplicationException();
        Trace.TraceInformation("{0} seek#2 {1} in {2}", name, count, time.ElapsedMilliseconds);

        time.Reset();
        time.Start();
        // Enumeration must yield exactly 'count' entries, all updated.
        int tmpCount = 0;
        foreach (KeyValuePair<int, string> tmp in data)
            if (tmp.Value != myTestValue2) throw new ApplicationException();
            else tmpCount++;
        if (tmpCount != count) throw new ApplicationException();
        Trace.TraceInformation("{0} foreach {1} in {2}", name, count, time.ElapsedMilliseconds);

        time.Reset();
        time.Start();
        for (int i = start; i != stop; i += incr)
            if (!data.Remove(i)) throw new ApplicationException();
        Trace.TraceInformation("{0} delete {1} in {2}", name, count, time.ElapsedMilliseconds);

        // Everything must now be gone.
        for (int i = start; i != stop; i += incr)
            if (data.TryGetValue(i, out test)) throw new ApplicationException();
    }
}