public void TestCommitRollback()
{
    // Exercises Commit/Rollback semantics: a rollback discards uncommitted
    // changes while a commit makes them permanent.
    using (BPlusTree<int, string> tree = Create(Options))
    {
        tree.EnableCount();

        // Rollback/commit on an empty tree are no-ops.
        Assert.AreEqual(0, tree.Count);
        tree.Rollback();
        Assert.AreEqual(0, tree.Count);
        tree.Commit();
        Assert.AreEqual(0, tree.Count);

        // An uncommitted add is undone by rollback...
        tree.Add(1, "A");
        tree.Rollback();
        Assert.AreEqual(0, tree.Count);
        tree.Commit();
        Assert.AreEqual(0, tree.Count);

        // ...but survives a later rollback once committed.
        tree.Add(1, "A");
        tree.Commit();
        Assert.AreEqual(1, tree.Count);
        tree.Rollback();
        Assert.AreEqual(1, tree.Count);

        // Rollback removes only the uncommitted insert of key 2.
        tree.Add(2, "B");
        tree.Rollback();
        Assert.AreEqual(1, tree.Count);

        // A committed update persists across a subsequent rollback.
        tree[1] = "abc";
        tree.Commit();
        Assert.AreEqual(1, tree.Count);
        tree.Rollback();
        Assert.AreEqual("abc", tree[1]);
        Assert.IsFalse(tree.ContainsKey(2));
    }
}
private static void VerifyDictionary(Dictionary <int, string> expected, BPlusTree <int, string> tree)
{
    // Validates tree integrity and verifies its contents exactly match
    // 'expected' via both full enumeration and per-key lookup.
    tree.Validate();
    tree.EnableCount();

    // Working copy so matched entries can be removed as they are seen.
    Dictionary <int, string> test = new Dictionary <int, string>(expected);
    List <KeyValuePair <int, string> > pairs = new List <KeyValuePair <int, string> >(test);
    string val;

    // Every pair the tree enumerates must exist exactly once in the expected set.
    foreach (KeyValuePair <int, string> pair in tree)
    {
        Assert.IsTrue(test.TryGetValue(pair.Key, out val));
        Assert.AreEqual(pair.Value, val);
        Assert.IsTrue(test.Remove(pair.Key));
    }
    // The tree must not have omitted any expected pair.
    Assert.AreEqual(0, test.Count);
    // (Removed dead code: "test = null; Assert.IsNull(test);" asserted a
    // value the line before had just assigned — it could never fail.)

    // Every expected pair must also be retrievable by point lookup.
    Assert.AreEqual(pairs.Count, tree.Count);
    foreach (KeyValuePair <int, string> pair in pairs)
    {
        Assert.IsTrue(tree.TryGetValue(pair.Key, out val));
        Assert.AreEqual(pair.Value, val);
    }
}
public void TestBulkInsert()
{
    // Bulk-loads three batches (random, ascending, descending) verifying the
    // count after each, then finishes with a large sorted AddRange.
    Stopwatch sw = Stopwatch.StartNew();
    var options = Options.Clone();

    // Stacked usings: 'data' is disposed before 'temp'.  (Removed the
    // original's explicit temp.Dispose() inside its own using block — a
    // redundant double dispose — and a commented-out alternate Create call.)
    using (TempFile temp = new TempFile())
    using (BPlusTree <int, string> data = Create(options))
    {
        const bool bulk = true;

        // 1000 random records, unsorted input.
        Insert(data, CreateRandom(1000, 3000), bulk, false);
        data.EnableCount();
        Assert.AreEqual(1000, data.Count);

        // 1000 ascending records appended after the current maximum key.
        Insert(data, CreateCount(data.Last().Key + 1, 1, 1000), bulk, true);
        Assert.AreEqual(2000, data.Count);

        // 1000 descending records starting well above the existing keys.
        Insert(data, CreateCount(data.Last().Key + 10001, -1, 1000), bulk, false);
        Assert.AreEqual(3000, data.Count);

        // Finally AddRange every other key up to the midpoint.
        int lastKey = data.Last().Key;
        data.AddRange(CreateCount(1, 2, lastKey / 2), true);
    }
    Trace.WriteLine("Inserted in " + sw.Elapsed);
}
public void TestAtomicAddOrUpdate()
{
    // Verifies AddOrUpdate invokes the correct add/update factory and that
    // conditional TryRemove evaluates its predicate against stored values.
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();
        int[] counter = { -1 };

        // All keys are new: only the add factory may run.
        for (int i = 0; i < 100; i++)
        {
            data.AddOrUpdate(i, k => (++counter[0]).ToString(), (k, v) => { throw new InvalidOperationException(); });
        }

        // Values were assigned 0..99 in key order, so odd keys hold odd values.
        for (int i = 0; i < 100; i++)
        {
            Assert.AreEqual((i & 1) == 1, data.TryRemove(i, (k, v) => (int.Parse(v) & 1) == 1));
        }

        // Re-add the removed keys / update the survivors; every call bumps the counter.
        for (int i = 0; i < 100; i++)
        {
            data.AddOrUpdate(i, k => (++counter[0]).ToString(), (k, v) => (++counter[0]).ToString());
        }
        Assert.AreEqual(100, data.Count);
        Assert.AreEqual(200, counter[0] + 1);

        // Second pass assigned 100..199 in key order, so each value is i + 100.
        for (int i = 0; i < 100; i++)
        {
            Assert.IsTrue(data.TryRemove(i, (k, v) => int.Parse(v) - 100 == i));
        }
        Assert.AreEqual(0, data.Count);
    }
}
private BPlusTree <string, Tuple <OSSIndexProject, IEnumerable <OSSIndexProjectVulnerability> > > InitialiseProjectVulnerabilitiesCache(string file)
{
    // Opens (or creates) the disk-backed vulnerability cache, purges entries
    // whose TTL has expired, and returns the ready-to-use tree.
    lock (project_vulnerabilities_cache_lock)
    {
        BPlusTree <string, Tuple <OSSIndexProject, IEnumerable <OSSIndexProjectVulnerability> > > .OptionsV2 cache_file_options =
            new BPlusTree <string, Tuple <OSSIndexProject, IEnumerable <OSSIndexProjectVulnerability> > > .OptionsV2(
                PrimitiveSerializer.String,
                new BsonSerializer <Tuple <OSSIndexProject, IEnumerable <OSSIndexProjectVulnerability> > >());
        cache_file_options.CalcBTreeOrder(4, 128);
        cache_file_options.CreateFile = CreatePolicy.IfNeeded;
        cache_file_options.FileName = file;
        cache_file_options.StoragePerformance = StoragePerformance.CommitToDisk;

        var c = new BPlusTree <string, Tuple <OSSIndexProject, IEnumerable <OSSIndexProjectVulnerability> > >(cache_file_options);
        c.EnableCount();

        // FIX: materialize the expired-key query up front.  The original kept
        // it deferred, which (a) enumerated c.Keys while c.Remove() mutated
        // the tree, and (b) left the stored property re-evaluating against
        // the already-purged tree on any later enumeration.
        List <string> expired_cache_keys =
            (from cache_key in c.Keys
             where DateTime.UtcNow.Subtract(GetProjectVulnerabilitiesCacheEntry(cache_key).Item2) >= this.ProjectVulnerabilitiesCacheTTL
             join artifact in ArtifactsWithProjects
                 on GetProjectVulnerabilitiesCacheEntry(cache_key).Item1 equals artifact.ProjectId
             select cache_key).ToList();
        this.ProjectVulnerabilitiesExpiredCacheKeys = expired_cache_keys;

        foreach (string k in expired_cache_keys)
        {
            if (!c.Remove(k))
            {
                throw new Exception("Error removing expired cache item with key: " + k + ".");
            }
        }
        return(c);
    }
}
public void TestAtomicInterfaces()
{
    // Exercises the by-ref AddOrUpdate/TryRemove overloads: while
    // update.Value is unset the calls only report existing state; once a
    // Value is assigned they actually mutate the tree.
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();
        data[1] = "a";

        AddUpdateValue update = new AddUpdateValue();

        // No Value assigned: nothing is written, but OldValue is populated.
        Assert.IsFalse(data.AddOrUpdate(1, ref update));
        Assert.AreEqual("a", update.OldValue);
        Assert.IsFalse(data.AddOrUpdate(2, ref update));
        Assert.IsNull(update.OldValue);
        Assert.IsFalse(data.TryRemove(1, ref update));
        Assert.AreEqual("a", update.OldValue);

        Assert.AreEqual(1, data.Count);
        Assert.AreEqual("a", data[1]);

        // With a Value the same calls update key 1 and insert key 2.
        update.Value = "b";
        Assert.IsTrue(data.AddOrUpdate(1, ref update));
        Assert.AreEqual("a", update.OldValue);
        Assert.IsTrue(data.AddOrUpdate(2, ref update));
        Assert.IsNull(update.OldValue);

        Assert.AreEqual(2, data.Count);
        Assert.AreEqual("b", data[1]);
        Assert.AreEqual("b", data[2]);

        // Conditional removes succeed and report the removed value.
        Assert.IsTrue(data.TryRemove(1, ref update));
        Assert.AreEqual("b", update.OldValue);
        Assert.IsTrue(data.TryRemove(2, ref update));
        Assert.AreEqual("b", update.OldValue);
        Assert.AreEqual(0, data.Count);
    }
}
public ContentStorage(string directory, bool asReadonly)
{
    // Sets up the on-disk layout (content payload dir + B+Tree index file)
    // and opens the index, tuned differently for read-only vs writable use.
    _disposables = new DisposingList();
    _readonly = asReadonly;
    _storageDir = directory;

    // Content payloads live under "content"; created only when writable.
    _dataDir = Path.Combine(directory, "content");
    if (!_readonly && !Directory.Exists(_dataDir))
    {
        Directory.CreateDirectory(_dataDir);
    }
    _indexDir = Path.Combine(directory, "index");

    var options = new BPlusTree<string, ContentRecord>.Options(
        PrimitiveSerializer.Instance,
        new ProtoSerializer<ContentRecord, ContentRecord.Builder>());

    // Cache tuning: keep 100-1000 nodes alive with no timeout; read-only
    // instances cache everything, writable ones only recent nodes.
    options.CacheKeepAliveMaximumHistory = 1000;
    options.CacheKeepAliveMinimumHistory = 100;
    options.CacheKeepAliveTimeout = int.MaxValue;
    options.CachePolicy = asReadonly ? CachePolicy.All : CachePolicy.Recent;

    // Read-only instances never create the index file and skip locking.
    options.CreateFile = asReadonly ? CreatePolicy.Never : CreatePolicy.IfNeeded;
    options.FileName = Path.Combine(directory, "content.index");
    options.FileBlockSize = 0x02000; //8kb
    options.ReadOnly = asReadonly;
    options.CallLevelLock = asReadonly
        ? (ILockStrategy) new IgnoreLocking()
        : new SimpleReadWriteLocking();
    options.LockingFactory = asReadonly
        ? (ILockFactory) new LockFactory <IgnoreLocking>()
        : new LockFactory <SimpleReadWriteLocking>();
    options.CalcBTreeOrder(64, 256);

    _index = new BPlusTree<string, ContentRecord>(options);
    _disposables.Add(_index);
    _index.EnableCount();
}
public void TestAutoCommit()
{
    // With TransactionLogLimit = 30 bytes, the tree auto-commits when the
    // log outgrows that limit; rollback then discards only what followed
    // the implicit commit.
    var options = (BPlusTree<int, string>.OptionsV2)Options;
    options.TransactionLogLimit = 30;

    using (BPlusTree<int, string> tree = Create(options))
    {
        tree.EnableCount();
        Assert.AreEqual(0, tree.Count);

        // One small write stays under the limit, so rollback undoes it.
        tree.Add(1, "A");
        tree.Rollback();
        Assert.AreEqual(0, tree.Count);

        tree.Add(1, "A");
        tree.Add(2, "B"); //The second write exceeds 30 bytes and auto-commits
        tree.Rollback();
        Assert.AreEqual(2, tree.Count);

        tree.Add(3, "C");
        tree.Add(4, "D"); //The second write will commit, but not the last
        tree.Add(5, "E");
        tree.Rollback();
        Assert.AreEqual(4, tree.Count);
        Assert.IsFalse(tree.ContainsKey(5));
    }
}
void TestRandomAddRemove(int repeat, int nodesz, int size)
{
    // Stress test: random insert/remove cycles at a fixed node size,
    // checking after every phase that the tree mirrors a reference map.
    List<int> keysAdded = new List<int>(250000);
    BPlusTreeOptions<int, string> options = Options;
    options.LockingFactory = new IgnoreLockFactory();

    Dictionary<int, string> keys = new Dictionary<int, string>();

    for (; repeat > 0; repeat--)
    {
        keys.Clear();
        options.BTreeOrder = nodesz;

        using (BPlusTree<int, string> data = Create(options))
        {
            data.EnableCount();

            AddRandomKeys(size, keys, data);
            IsSameList(keys, data);
            keysAdded.Clear();

            for (int tc = 0; tc < 1; tc++)
            {
                // Remove between one- and two-thirds of the keys, then
                // replace them, validating structure after each phase.
                int del = keys.Count / 3 + Random.Next(keys.Count / 3);
                RemoveRandomKeys(del, keys, data);
                IsSameList(keys, data);
                data.Validate();

                AddRandomKeys(del, keys, data);
                IsSameList(keys, data);
                data.Validate();
            }

            // Drain the tree: each key is removed, re-added, removed again,
            // and finally confirmed absent.
            keysAdded.Clear();
            foreach (KeyValuePair<int, string> kv in data)
            {
                keysAdded.Add(kv.Key);
            }
            foreach (int k in keysAdded)
            {
                Assert.IsTrue(data.Remove(k));
                data.Add(k, k.ToString());
                Assert.IsTrue(data.Remove(k));

                string test;
                Assert.IsFalse(data.TryGetValue(k, out test));
                Assert.IsNull(test);
            }
        }
    }
}
public void TestReadOnlyCopy()
{
    // A read-only instance opened over the same file must only observe
    // data the writer instance has committed.
    using (var tempFile = new TempFile())
    {
        var options = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
        }.CalcBTreeOrder(4, 10);

        var readcopy = options.Clone();
        readcopy.CreateFile = CreatePolicy.Never;
        readcopy.ReadOnly = true;

        using (var tree = new BPlusTree<int, string>(options))
        {
            // Nothing written yet: the copy is empty.
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }

            // Uncommitted inserts remain invisible to a fresh read-only copy.
            tree.AddRange(MakeValues(0, 100));
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }
            tree.Commit();

            // Post-commit removals and new inserts are still pending, so the
            // copy sees exactly the 100 committed records (keys 0..99).
            for (int i = 0; i < 100; i++)
            {
                tree.Remove(i);
            }
            tree.AddRange(MakeValues(1000, 1000));
            using (var copy = new BPlusTree<int, string>(readcopy))
            {
                copy.EnableCount();
                Assert.AreEqual(100, copy.Count);
                Assert.AreEqual(0, copy.First().Key);
                Assert.AreEqual(99, copy.Last().Key);
            }
            tree.Commit();
        }
    }
}
// key, data, added, last_used
/// <summary>
/// Initialize the cache.
/// </summary>
/// <param name="capacity">Maximum number of entries.</param>
/// <param name="evictCount">Number to evict when capacity is reached.</param>
/// <param name="debug">Enable or disable console debugging.</param>
/// <exception cref="ArgumentException">Thrown when evictCount exceeds capacity.</exception>
public LRUCacheBTree(int capacity, int evictCount, bool debug)
{
    // FIX: validate arguments before any state is assigned or the backing
    // tree is allocated (the original validated last, doing wasted work and
    // partially initializing the instance before throwing).
    if (evictCount > capacity)
    {
        throw new ArgumentException("Evict count must be less than or equal to capacity.");
    }

    Capacity = capacity;
    EvictCount = evictCount;
    Debug = debug;
    Cache = new BPlusTree<string, Tuple<object, DateTime, DateTime>>();
    Cache.EnableCount();
}
public BPlusTree<int, string> Create()
{
    // In-memory tree with a tiny order (4) to force frequent node splits,
    // and locking disabled.
    var options = new BPlusTree<int, string>.Options(
        PrimitiveSerializer.Instance, PrimitiveSerializer.Instance, Comparer<int>.Default)
    {
        BTreeOrder = 4,
        LockingFactory = new IgnoreLockFactory()
    };

    var tree = new BPlusTree<int, string>(options);
    tree.EnableCount();
    return tree;
}
public void TestReadOnlyCopy()
{
    // Read-only view of the same file: only committed data is visible.
    using (var tempFile = new TempFile())
    {
        var writerOptions = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
        }.CalcBTreeOrder(4, 10);

        var readerOptions = writerOptions.Clone();
        readerOptions.CreateFile = CreatePolicy.Never;
        readerOptions.ReadOnly = true;

        using (var tree = new BPlusTree<int, string>(writerOptions))
        {
            // Empty before any writes.
            using (var copy = new BPlusTree<int, string>(readerOptions))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }

            // 100 uncommitted inserts: still invisible to the reader.
            tree.AddRange(MakeValues(0, 100));
            using (var copy = new BPlusTree<int, string>(readerOptions))
            {
                copy.EnableCount();
                Assert.AreEqual(0, copy.Count);
            }
            tree.Commit();

            // Uncommitted removals + a new range: reader still sees the
            // committed keys 0..99 only.
            for (int i = 0; i < 100; i++)
            {
                tree.Remove(i);
            }
            tree.AddRange(MakeValues(1000, 1000));
            using (var copy = new BPlusTree<int, string>(readerOptions))
            {
                copy.EnableCount();
                Assert.AreEqual(100, copy.Count);
                Assert.AreEqual(0, copy.First().Key);
                Assert.AreEqual(99, copy.Last().Key);
            }
            tree.Commit();
        }
    }
}
// Verifies recovery of a brand-new tree from its transaction log: records
// inserted through the tree plus records appended directly to the log must
// all reappear when the tree is re-opened with that log.
void TestRecoveryOnNew(BPlusTree <Guid, TestInfo> .OptionsV2 options, int count, int added)
{
    BPlusTree <Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary <Guid, TestInfo> data = new Dictionary <Guid, TestInfo>();
    try
    {
        Assert.IsNotNull(options.TransactionLog);
        temp.Delete();
        tree = new BPlusTree <Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            // Dispose the tree's private "_storage" (obtained via the
            // PropertyValue helper — presumably reflection; confirm) while
            // inserting, then append extra records straight into the log.
            using ((IDisposable) new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;
        //No data... yet...
        // A copy of the file opened WITHOUT the transaction log must be
        // empty — nothing was flushed to the primary file.
        using (TempFile testempty = TempFile.FromCopy(options.FileName))
        {
            var testoptions = options.Clone();
            testoptions.TransactionLogFileName = null;
            testoptions.TransactionLog = null;
            testoptions.FileName = testempty.TempPath;
            using (var empty = new BPlusTree <Guid, TestInfo>(testoptions))
            {
                empty.EnableCount();
                Assert.AreEqual(0, empty.Count);
            }
        }
        //Now recover...
        // Re-opening with the original options (log attached) must restore
        // every record in 'data'.
        using (var recovered = new BPlusTree <Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null)
        {
            tree.Dispose();
        }
    }
}
public void TestEnumeration()
{
    // A live enumerator must tolerate concurrent mutation of the tree,
    // resuming at the next key still present.
    var options = Options;
    options.BTreeOrder = 4;

    using (BPlusTree<int, string> data = new BPlusTree<int, string>(options))
    {
        data.EnableCount();
        data.DebugSetOutput(new StringWriter());
        data.DebugSetValidateOnCheckpoint(true);

        for (int id = 0; id < 10; id++)
        {
            data.Add(id, id.ToString());
        }

        using (IEnumerator<KeyValuePair<int, string>> enu = data.GetEnumerator())
        {
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(0, enu.Current.Key);

            // Mutate while positioned at key 0: drop 2..9, add 6..10.
            for (int id = 2; id < 10; id++)
            {
                Assert.IsTrue(data.Remove(id));
            }
            for (int id = 6; id < 11; id++)
            {
                data.Add(id, id.ToString());
            }

            // Enumeration continues at 1, then jumps to the re-added 6 and 7.
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(1, enu.Current.Key);
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(6, enu.Current.Key);
            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(7, enu.Current.Key);

            // Mutate again: drop 8..10, add 11; the enumerator lands on 11
            // and then reports completion.
            Assert.IsTrue(data.Remove(8));
            Assert.IsTrue(data.Remove(9));
            Assert.IsTrue(data.Remove(10));
            data.Add(11, 11.ToString());

            Assert.IsTrue(enu.MoveNext());
            Assert.AreEqual(11, enu.Current.Key);
            Assert.IsFalse(enu.MoveNext());
        }
        data.Clear();
    }
}
public BPlusTree<KeyInfo, DataValue> Create()
{
    // Tree configured with explicit node-size bounds rather than a
    // computed B+Tree order.
    var options = new BPlusTree<KeyInfo, DataValue>.Options(
        new KeyInfoSerializer(), new DataValueSerializer(), new KeyInfoComparer())
    {
        MinimumChildNodes = 16,
        MaximumChildNodes = 24,
        MinimumValueNodes = 4,
        MaximumValueNodes = 12,
    };

    var tree = new BPlusTree<KeyInfo, DataValue>(options);
    tree.EnableCount();
    return tree;
}
public void TestAbortWritersAndRecover()
{
    // Aborts writers mid-stream, then verifies RecoverFile can extract the
    // surviving records from the primary file and from the aborted copy.
    BPlusTree <KeyInfo, DataValue> .Options options = new BPlusTree <KeyInfo, DataValue> .Options(
        new KeyInfoSerializer(), new DataValueSerializer(), new KeyInfoComparer());
    options.CalcBTreeOrder(32, 300);
    options.FileName = TempFile.TempPath;
    options.CreateFile = CreatePolicy.Always;

    using (TempFile copy = new TempFile())
    {
        copy.Delete();
        int minRecordCreated = StartAndAbortWriters(options, copy);

        using (TempFile.Attach(copy.TempPath + ".recovered")) //used to create the new copy
        using (TempFile.Attach(copy.TempPath + ".deleted"))   //renamed existing file
        {
            options.CreateFile = CreatePolicy.Never;

            // Recover from the primary file first.
            int recoveredRecords = BPlusTree <KeyInfo, DataValue> .RecoverFile(options);
            if (recoveredRecords < RecordsCreated)
            {
                // FIX: corrected typo in the failure message ("recieved").
                Assert.Fail("Unable to recover records, received ({0} of {1}).", recoveredRecords, RecordsCreated);
            }

            // Then recover the aborted copy; it must hold at least the
            // records known to be written before the abort.
            options.FileName = copy.TempPath;
            recoveredRecords = BPlusTree <KeyInfo, DataValue> .RecoverFile(options);
            Assert.IsTrue(recoveredRecords >= minRecordCreated,
                "Expected at least " + minRecordCreated + " found " + recoveredRecords);

            using (BPlusTree <KeyInfo, DataValue> dictionary = new BPlusTree <KeyInfo, DataValue>(options))
            {
                dictionary.EnableCount();
                Assert.AreEqual(recoveredRecords, dictionary.Count);

                // Key/value consistency check; BPlusTree enumeration
                // continues while entries are removed (see TestEnumeration).
                foreach (KeyValuePair <KeyInfo, DataValue> kv in dictionary)
                {
                    Assert.AreEqual(kv.Key.UID, kv.Value.Key.UID);
                    dictionary.Remove(kv.Key);
                }
                Assert.AreEqual(0, dictionary.Count);
            }
        }
    }
}
public void TestConcurrency()
{
    // Runs a mix of mutating and reading workloads against one tree until
    // the stop event is signalled.
    mreStop.Reset();
    using (TempFile temp = new TempFile())
    {
        BPlusTree<Guid, TestInfo>.OptionsV2 options = new BPlusTree<Guid, TestInfo>.OptionsV2(
            PrimitiveSerializer.Guid, new TestInfoSerializer());
        options.CalcBTreeOrder(16, 24);
        options.CreateFile = CreatePolicy.Always;
        options.FileName = temp.TempPath;

        using (BPlusTree<Guid, TestInfo> tree = new BPlusTree<Guid, TestInfo>(options))
        {
            tree.EnableCount();

            // Five readers alongside the writers bias the mix toward lookups.
            var tests = new Action<BPlusTree<Guid, TestInfo>>[]
            {
                DeleteStuff, UpdateStuff, AddStuff, AddRanges, BulkyInserts,
                FetchStuff, FetchStuff, FetchStuff, FetchStuff, FetchStuff
            };
            var actions = new List<IAsyncResult>();
            foreach (var t in tests)
            {
                actions.Add(t.BeginInvoke(tree, null, null));
            }

            const int waitIterations = 8; // wait for n seconds
            int timesWaited = 0;
            do
            {
                Trace.TraceInformation("Dictionary.Count = {0}", tree.Count);
                Thread.Sleep(1000);
                timesWaited++;
            } while (timesWaited < waitIterations && Debugger.IsAttached);

            // Signal shutdown and join every worker.
            mreStop.Set();
            for (int i = 0; i < actions.Count; i++)
            {
                tests[i].EndInvoke(actions[i]);
            }
            Trace.TraceInformation("Dictionary.Count = {0}", tree.Count);
        }
    }
}
public BPlusTree<int, string> Create()
{
    // Lazily allocate the shared temp file, then open a disk-backed tree
    // of order 16 with locking disabled.
    TempFile = TempFile ?? new TempFile();

    var options = new BPlusTree<int, string>.Options(
        PrimitiveSerializer.Instance, PrimitiveSerializer.Instance, Comparer<int>.Default)
    {
        BTreeOrder = 16,
        LockingFactory = new IgnoreLockFactory(),
        FileName = TempFile.TempPath,
        CreateFile = CreatePolicy.Always,
    };

    var tree = new BPlusTree<int, string>(options);
    tree.EnableCount();
    return tree;
}
public void TestAtomicAdd()
{
    // TryAdd must invoke its value factory exactly once per new key and
    // never for a key that already exists.
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();
        int[] counter = { -1 };

        for (int i = 0; i < 100; i++)
        {
            Assert.IsTrue(data.TryAdd(i, k => (++counter[0]).ToString()));
        }
        Assert.AreEqual(100, data.Count);
        Assert.AreEqual(100, counter[0] + 1);

        //Inserts of existing keys will not call method
        Assert.IsFalse(data.TryAdd(50, k => { throw new InvalidOperationException(); }));
        Assert.AreEqual(100, data.Count);
    }
}
public BPlusTree<int, string> Create()
{
    // Dispose any previous temp file so each call starts from a fresh store.
    if (_tempFile != null)
    {
        _tempFile.Dispose();
    }
    _tempFile = new TempFile();

    var tree = new BPlusTree<int, string>(
        new BPlusTree<int, string>.OptionsV2(PrimitiveSerializer.Instance, PrimitiveSerializer.Instance)
        {
            CreateFile = CreatePolicy.Always,
            FileName = _tempFile.TempPath,
        }.CalcBTreeOrder(4, 10));

    tree.EnableCount();
    return tree;
}
public void TestConcurrency()
{
    // Concurrent workload mix (deletes, updates, adds, range/bulk inserts,
    // five fetchers) against a single disk-backed tree.
    mreStop.Reset();
    using (TempFile temp = new TempFile())
    {
        var options = new BPlusTree<Guid, TestInfo>.OptionsV2(
            PrimitiveSerializer.Guid, new TestInfoSerializer());
        options.CalcBTreeOrder(16, 24);
        options.CreateFile = CreatePolicy.Always;
        options.FileName = temp.TempPath;

        using (var tree = new BPlusTree<Guid, TestInfo>(options))
        {
            tree.EnableCount();

            var workers = new Action<BPlusTree<Guid, TestInfo>>[]
            {
                DeleteStuff, UpdateStuff, AddStuff, AddRanges, BulkyInserts,
                FetchStuff, FetchStuff, FetchStuff, FetchStuff, FetchStuff
            };
            var pending = new List<IAsyncResult>();
            foreach (var worker in workers)
            {
                pending.Add(worker.BeginInvoke(tree, null, null));
            }

            const int waitIterations = 8; // wait for n seconds
            int timesWaited = 0;
            do
            {
                Trace.TraceInformation("Dictionary.Count = {0}", tree.Count);
                Thread.Sleep(1000);
                timesWaited++;
            } while (timesWaited < waitIterations && Debugger.IsAttached);

            // Request shutdown, then join every worker delegate.
            mreStop.Set();
            for (int i = 0; i < pending.Count; i++)
            {
                workers[i].EndInvoke(pending[i]);
            }
            Trace.TraceInformation("Dictionary.Count = {0}", tree.Count);
        }
    }
}
public void TestInserts()
{
    // Inserts two fixed key sequences verifying Count after every add, then
    // (after dropping the cache) reads, overwrites, and removes each key.
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();

        int[][] testArrays =
        {
            new int[] { 10, 18, 81, 121, 76, 31, 250, 174, 24, 38, 246, 79 },
            new int[] { 110, 191, 84, 218, 170, 217, 199, 232, 184, 254, 32, 90, 241, 136, 181, 28, 226, 69, 52 },
        };

        foreach (int[] arry in testArrays)
        {
            data.Clear();
            Assert.AreEqual(0, data.Count);

            int count = 0;
            foreach (int id in arry)
            {
                data.Add(id, id.ToString());
                Assert.AreEqual(++count, data.Count);
            }
            Assert.AreEqual(arry.Length, data.Count);

            // Force subsequent reads to hit storage, not cached nodes.
            data.UnloadCache();

            foreach (int id in arry)
            {
                Assert.AreEqual(id.ToString(), data[id]);
                data[id] = String.Empty;
                Assert.AreEqual(String.Empty, data[id]);

                Assert.IsTrue(data.Remove(id));
                Assert.AreEqual(--count, data.Count);
            }
            Assert.AreEqual(0, data.Count);
        }
    }
}
public void TestCounts()
{
    // Count reads int.MinValue until EnableCount() is called; afterwards it
    // tracks adds and removes, ignoring failed duplicate adds.
    using (BPlusTree<int, string> data = Create(Options))
    {
        Assert.AreEqual(int.MinValue, data.Count);

        data.EnableCount();
        Assert.AreEqual(0, data.Count);

        Assert.IsTrue(data.TryAdd(1, "test"));
        Assert.AreEqual(1, data.Count);
        Assert.IsTrue(data.TryAdd(2, "test"));
        Assert.AreEqual(2, data.Count);

        // A duplicate key neither inserts nor changes the count.
        Assert.IsFalse(data.TryAdd(2, "test"));
        Assert.AreEqual(2, data.Count);

        Assert.IsTrue(data.Remove(1));
        Assert.AreEqual(1, data.Count);
        Assert.IsTrue(data.Remove(2));
        Assert.AreEqual(0, data.Count);
    }
}
public void TestConditionalRemove()
{
    // TryRemove with a predicate removes only entries the predicate accepts.
    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();
        for (int i = 0; i < 100; i++)
        {
            data.Add(i, i.ToString());
        }

        // First pass strips the odd values (value equals key here).
        for (int i = 0; i < 100; i++)
        {
            Assert.AreEqual((i & 1) == 1, data.TryRemove(i, (k, v) => (int.Parse(v) & 1) == 1));
        }
        Assert.AreEqual(50, data.Count);

        // Second pass strips multiples of ten from the remaining evens.
        for (int i = 0; i < 100; i++)
        {
            Assert.AreEqual((i % 10) == 0, data.TryRemove(i, (k, v) => (int.Parse(v) % 10) == 0));
        }
        Assert.AreEqual(40, data.Count);
    }
}
public void RandomSequenceTest()
{
    // Repeatedly fills the tree with a random subset of [0, limit) and
    // verifies every inserted key is counted and removable.
    int iterations = 5;
    int limit = 255;

    using (BPlusTree<int, string> data = Create(Options))
    {
        data.EnableCount();
        List<int> numbers = new List<int>();

        while (iterations-- > 0)
        {
            data.Clear();
            numbers.Clear();
            data.DebugSetValidateOnCheckpoint(true);

            // Draw up to 'limit' random keys, skipping duplicates.
            for (int i = 0; i < limit; i++)
            {
                int id = Random.Next(limit);
                if (!numbers.Contains(id))
                {
                    numbers.Add(id);
                    data.Add(id, "V" + id);
                }
            }
            Assert.AreEqual(numbers.Count, data.Count);

            foreach (int number in numbers)
            {
                Assert.IsTrue(data.Remove(number));
            }
            Assert.AreEqual(0, data.Count);
        }
    }
}
// Commits a small baseline, performs thousands of uncommitted inserts and
// updates, rolls back — then restores a saved copy of the transaction log
// and verifies the uncommitted state comes back in full.
public void TestRestoreLargeLog()
{
    using (TempFile savelog = new TempFile())
    using (TempFile temp = new TempFile())
    {
        var options = GetOptions(temp);
        options.FileBlockSize = 512;
        options.StoragePerformance = StoragePerformance.Fastest;
        options.CalcBTreeOrder(Marshal.SizeOf(typeof(Guid)), Marshal.SizeOf(typeof(TestInfo)));
        options.TransactionLog = new TransactionLog<Guid, TestInfo>(
            new TransactionLogOptions<Guid, TestInfo>(
                options.TransactionLogFileName,
                options.KeySerializer,
                options.ValueSerializer
            )
        );
        //Now recover...
        Dictionary<Guid, TestInfo> first = new Dictionary<Guid, TestInfo>();
        Dictionary<Guid, TestInfo> sample;
        using (var tree = new BPlusTree<Guid, TestInfo>(options))
        {
            tree.EnableCount();
            // Baseline: 100 committed records.
            Insert(tree, first, 1, 100, TimeSpan.FromMinutes(1));
            tree.Commit();
            Assert.AreEqual(100, tree.Count);
            // 35,000 additional uncommitted inserts (7 workers x 5000 each).
            sample = new Dictionary<Guid, TestInfo>(first);
            Insert(tree, sample, 7, 5000, TimeSpan.FromMinutes(1));
            Assert.AreEqual(35100, tree.Count);
            // Re-write every record (bumping its counters) to add a large
            // batch of uncommitted updates on top of the inserts.
            for (int i = 0; i < 1; i++)
            {
                foreach (var rec in tree)
                {
                    var value = rec.Value;
                    value.UpdateCount++;
                    value.ReadCount++;
                    tree[rec.Key] = value;
                }
            }
            // Preserve the log before discarding all uncommitted work.
            File.Copy(options.TransactionLog.FileName, savelog.TempPath, true);
            tree.Rollback();
            TestInfo.AssertEquals(first, tree);
        }
        //file still has initial committed data
        TestInfo.AssertEquals(first, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
        //restore the log and verify all data.
        File.Copy(savelog.TempPath, options.TransactionLog.FileName, true);
        using (var tree = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(sample, tree);
        }
        // After replaying the restored log, the file reflects the full
        // 'sample' data set.
        TestInfo.AssertEquals(sample, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
    }
}
/// <summary>
/// Clear the cache.
/// </summary>
public void Clear()
{
    lock (CacheLock)
    {
        // Discard the whole tree and start fresh; re-enable counting so the
        // Count property remains usable on the replacement instance.
        Cache = new BPlusTree<string, Tuple<object, DateTime, DateTime>>();
        Cache.EnableCount();
        // (Removed a redundant trailing "return;" — no-op in a void method.)
    }
}
// Keeps a second tree in sync with the first by bulk-loading its committed
// file once, then incrementally replaying the shared transaction log.
public void TestSyncFromLogging()
{
    using (var tempFile = new TempFile())
    using (var logfile = new TempFile())
    using (var tempCopy = new TempFile())
    {
        var options = new BPlusTree <int, string> .OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
            TransactionLogFileName = logfile.TempPath,
        }
        .CalcBTreeOrder(4, 10);
        // Independent copy stored in its own file.
        var readcopy = options.Clone();
        readcopy.FileName = tempCopy.TempPath;
        readcopy.StoragePerformance = StoragePerformance.Fastest;
        using (var tree = new BPlusTree <int, string>(options))
        using (var copy = new BPlusTree <int, string>(readcopy))
        using (var tlog = new TransactionLog <int, string>(
            new TransactionLogOptions <int, string>(logfile.TempPath, PrimitiveSerializer.Int32, PrimitiveSerializer.String) { ReadOnly = true }))
        {
            tree.Add(0, "0");
            tree.Commit();
            long logpos = 0;
            copy.EnableCount();
            //start by copying the data from tree's file into the copy instance:
            copy.BulkInsert(
                BPlusTree <int, string> .EnumerateFile(options),
                new BulkInsertOptions { InputIsSorted = true, CommitOnCompletion = false, ReplaceContents = true }
            );
            Assert.AreEqual(1, copy.Count);
            Assert.AreEqual("0", copy[0]);
            // Nothing new in the log yet: replay is a no-op.
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1, copy.Count);
            //insert some data...
            tree.AddRange(MakeValues(1, 99));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(100, copy.Count);
            // Replaying logged removals empties the copy as well.
            for (int i = 0; i < 100; i++)
            {
                tree.Remove(i);
            }
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(0, copy.Count);
            tree.AddRange(MakeValues(1000, 1000));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1000, copy.Count);
        }
    }
}
private static void VerifyDictionary(Dictionary<int, string> expected, BPlusTree<int, string> tree)
{
    // Asserts the tree is structurally valid and holds exactly the pairs in
    // 'expected', checked by enumeration and by individual lookups.
    tree.Validate();
    tree.EnableCount();

    // Mutable copy: entries are removed as the enumeration matches them.
    Dictionary<int, string> test = new Dictionary<int, string>(expected);
    List<KeyValuePair<int, string>> pairs = new List<KeyValuePair<int, string>>(test);
    string val;

    foreach (KeyValuePair<int, string> pair in tree)
    {
        Assert.IsTrue(test.TryGetValue(pair.Key, out val));
        Assert.AreEqual(pair.Value, val);
        Assert.IsTrue(test.Remove(pair.Key));
    }
    // All expected entries were seen exactly once.
    Assert.AreEqual(0, test.Count);
    // (Removed dead code: "test = null; Assert.IsNull(test);" could never fail.)

    // Point lookups agree with the expected snapshot.
    Assert.AreEqual(pairs.Count, tree.Count);
    foreach (KeyValuePair<int, string> pair in pairs)
    {
        Assert.IsTrue(tree.TryGetValue(pair.Key, out val));
        Assert.AreEqual(pair.Value, val);
    }
}
/// <summary>
/// Builds the persistent attribute index (optionally journaled) plus the
/// in-memory transitory tree used to stage index operations, then restores
/// the numeric bounds from the persisted data.
/// </summary>
public virtual void Initialize()
{
    #region Main Tree Initialization
    var treeOptions = new BPlusTree <AttributeValue, long> .OptionsV2(AttributeValueSerializer.Global, new PrimitiveSerializer());
    if (_configuration.JournalEnabled)
    {
        // Journaling enabled: attach a write-through transaction log.
        var transactionLogOptions = new TransactionLogOptions <AttributeValue, long>(_path + ".tlog", new AttributeValueSerializer(), new PrimitiveSerializer());
        transactionLogOptions.FileOptions = FileOptions.WriteThrough;
        transactionLogOptions.FileBuffer = 4096;
        treeOptions.TransactionLog = new TransactionLog <AttributeValue, long>(transactionLogOptions);
        treeOptions.TransactionLogFileName = transactionLogOptions.FileName;
        treeOptions.StoragePerformance = StoragePerformance.LogFileNoCache;
    }
    else
    {
        treeOptions.StoragePerformance = StoragePerformance.LogFileInCache;
    }
    // Map the configured cache-policy string; anything unrecognized (or no
    // configuration) falls back to CachePolicy.Recent.
    if (_configuration.CachePolicy != null)
    {
        switch (_configuration.CachePolicy.ToLower())
        {
            case "all":
                treeOptions.CachePolicy = CachePolicy.All;
                break;
            case "none":
                treeOptions.CachePolicy = CachePolicy.None;
                break;
            default:
                treeOptions.CachePolicy = CachePolicy.Recent;
                break;
        }
    }
    else
    {
        treeOptions.CachePolicy = CachePolicy.Recent;
    }
    treeOptions.FileName = _path;
    treeOptions.StorageType = StorageType.Disk;
    treeOptions.CreateFile = CreatePolicy.IfNeeded;
    treeOptions.BTreeOrder = 64;
    treeOptions.LockingFactory = new IgnoreLockFactory();
    treeOptions.CallLevelLock = new IgnoreLocking();
    // NOTE(review): this unconditionally overwrites the journal-dependent
    // StoragePerformance chosen above — confirm the reset to Default is
    // intentional and not a leftover.
    treeOptions.StoragePerformance = StoragePerformance.Default;
    try
    {
        _tree = new BPlusTree <AttributeValue, long>(treeOptions);
        _tree.EnableCount();
        if (LoggerManager.Instance.IndexLogger != null && LoggerManager.Instance.IndexLogger.IsInfoEnabled)
        {
            LoggerManager.Instance.IndexLogger.Info("BPlusIndex", "Index (s) " + _indexKey.ToString() + " defined");
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): the rethrow happens only when a logger exists; with
        // no logger configured the initialization failure is silently
        // swallowed — confirm that is intended.
        if (LoggerManager.Instance.IndexLogger != null)
        {
            LoggerManager.Instance.IndexLogger.Error("BPlusIndex", "Error: " + ErrorCodes.Indexes.TREE_INITIALIZATION_FAILURE + " - Failed to initialize Index for attribute(s) " +
                _indexKey.ToString() + Environment.NewLine + ex.ToString());
            throw new IndexException(ErrorCodes.Indexes.TREE_INITIALIZATION_FAILURE);
        }
    }
    #endregion
    #region Transitory Tree Initialization
    // In-memory staging tree guarded by reader/writer locking.
    var _transitoryTreeOps = new BPlusTree <AttributeValue, IndexOp <long> > .Options(AttributeValueSerializer.Global, IndexOpSerializer <long> .Global);
    _transitoryTreeOps.StorageType = StorageType.Memory;
    _transitoryTreeOps.LockingFactory = new LockFactory <ReaderWriterLocking>();
    _transitoryTreeOps.CallLevelLock = new ReaderWriterLocking();
    _transitionTree = new BPlusTree <AttributeValue, IndexOp <long> >(_transitoryTreeOps);
    opsToCommit = new ConcurrentDictionary <long, IList <IndexOp <long> > >();
    appliedOps = new ClusteredList <long>();
    #endregion
    _bounds = new BoundingBox(1);
    try
    {
        RestoreBoundsFromTree();
    }
    catch (Exception ex)
    {
        // NOTE(review): same pattern — failure is swallowed when no logger
        // is configured.
        if (LoggerManager.Instance.IndexLogger != null)
        {
            LoggerManager.Instance.IndexLogger.Error("BPlusIndex", "Error: " + ErrorCodes.Indexes.NUMERIC_BOUNDS_CALCULATION_FAILURE + " - Failed to calculate numeric bounds of Index for attribute(s) " +
                _indexKey.ToString() + Environment.NewLine + ex);
            throw new IndexException(ErrorCodes.Indexes.NUMERIC_BOUNDS_CALCULATION_FAILURE);
        }
    }
}
// Recovery-from-log scenario: records written while the tree's storage is
// disposed, plus records appended directly to the log, must all be restored
// when the tree is re-opened with its transaction log.
void TestRecoveryOnNew(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> data = new Dictionary<Guid, TestInfo>();
    try
    {
        Assert.IsNotNull(options.TransactionLog);
        temp.Delete();
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            // Dispose the tree's private "_storage" (via the PropertyValue
            // helper — presumably reflection; confirm) while inserting, then
            // append further records straight into the log.
            using ((IDisposable)new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;
        //No data... yet...
        // A copy of the primary file opened WITHOUT the log must be empty.
        using (TempFile testempty = TempFile.FromCopy(options.FileName))
        {
            var testoptions = options.Clone();
            testoptions.TransactionLogFileName = null;
            testoptions.TransactionLog = null;
            testoptions.FileName = testempty.TempPath;
            using (var empty = new BPlusTree<Guid, TestInfo>(testoptions))
            {
                empty.EnableCount();
                Assert.AreEqual(0, empty.Count);
            }
        }
        //Now recover...
        // Re-opening with the log attached must restore every record.
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null)
            tree.Dispose();
    }
}
// After committing a 100-record baseline, performs 35,000 uncommitted
// inserts plus an update of every record, rolls back, and then restores a
// saved copy of the transaction log to recover the uncommitted state.
public void TestRestoreLargeLog()
{
    using (TempFile savelog = new TempFile())
    using (TempFile temp = new TempFile())
    {
        var options = GetOptions(temp);
        options.FileBlockSize = 512;
        options.StoragePerformance = StoragePerformance.Fastest;
        options.CalcBTreeOrder(Marshal.SizeOf(typeof(Guid)), Marshal.SizeOf(typeof(TestInfo)));
        options.TransactionLog = new TransactionLog <Guid, TestInfo>(
            new TransactionLogOptions <Guid, TestInfo>(
                options.TransactionLogFileName,
                options.KeySerializer,
                options.ValueSerializer
            )
        );
        //Now recover...
        Dictionary <Guid, TestInfo> first = new Dictionary <Guid, TestInfo>();
        Dictionary <Guid, TestInfo> sample;
        using (var tree = new BPlusTree <Guid, TestInfo>(options))
        {
            tree.EnableCount();
            // Baseline: 100 committed records.
            Insert(tree, first, 1, 100, TimeSpan.FromMinutes(1));
            tree.Commit();
            Assert.AreEqual(100, tree.Count);
            // 35,000 more uncommitted inserts (7 workers x 5000 each).
            sample = new Dictionary <Guid, TestInfo>(first);
            Insert(tree, sample, 7, 5000, TimeSpan.FromMinutes(1));
            Assert.AreEqual(35100, tree.Count);
            // Re-write every record (bumping its counters) so the log also
            // carries a large batch of uncommitted updates.
            for (int i = 0; i < 1; i++)
            {
                foreach (var rec in tree)
                {
                    var value = rec.Value;
                    value.UpdateCount++;
                    value.ReadCount++;
                    tree[rec.Key] = value;
                }
            }
            // Save the log before rollback discards the uncommitted work.
            File.Copy(options.TransactionLog.FileName, savelog.TempPath, true);
            tree.Rollback();
            TestInfo.AssertEquals(first, tree);
        }
        //file still has initial committed data
        TestInfo.AssertEquals(first, BPlusTree <Guid, TestInfo> .EnumerateFile(options));
        //restore the log and verify all data.
        File.Copy(savelog.TempPath, options.TransactionLog.FileName, true);
        using (var tree = new BPlusTree <Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(sample, tree);
        }
        // After log replay the file reflects the full 'sample' data set.
        TestInfo.AssertEquals(sample, BPlusTree <Guid, TestInfo> .EnumerateFile(options));
    }
}
// Mirrors a writer tree into a separate copy: bulk-load the committed file
// once, then keep the copy current by replaying the transaction log from
// the last replayed position.
public void TestSyncFromLogging()
{
    using (var tempFile = new TempFile())
    using (var logfile = new TempFile())
    using (var tempCopy = new TempFile())
    {
        var options = new BPlusTree<int, string>.OptionsV2(new PrimitiveSerializer(), new PrimitiveSerializer())
        {
            CreateFile = CreatePolicy.Always,
            FileName = tempFile.TempPath,
            TransactionLogFileName = logfile.TempPath,
        }.CalcBTreeOrder(4, 10);
        // The copy lives in its own file.
        var readcopy = options.Clone();
        readcopy.FileName = tempCopy.TempPath;
        readcopy.StoragePerformance = StoragePerformance.Fastest;
        using (var tree = new BPlusTree<int, string>(options))
        using (var copy = new BPlusTree<int, string>(readcopy))
        using (var tlog = new TransactionLog<int, string>(
            new TransactionLogOptions<int, string>(logfile.TempPath, PrimitiveSerializer.Int32, PrimitiveSerializer.String) { ReadOnly = true }))
        {
            tree.Add(0, "0");
            tree.Commit();
            long logpos = 0;
            copy.EnableCount();
            //start by copying the data from tree's file into the copy instance:
            copy.BulkInsert(
                BPlusTree<int, string>.EnumerateFile(options),
                new BulkInsertOptions { InputIsSorted = true, CommitOnCompletion = false, ReplaceContents = true }
            );
            Assert.AreEqual(1, copy.Count);
            Assert.AreEqual("0", copy[0]);
            // Nothing new in the log yet: replay is a no-op.
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1, copy.Count);
            //insert some data...
            tree.AddRange(MakeValues(1, 99));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(100, copy.Count);
            // Replaying logged removals empties the copy as well.
            for (int i = 0; i < 100; i++)
                tree.Remove(i);
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(0, copy.Count);
            tree.AddRange(MakeValues(1000, 1000));
            tlog.ReplayLog(copy, ref logpos);
            Assert.AreEqual(1000, copy.Count);
        }
    }
}