public void CompactionLimitsKviWriteSpeed()
{
    using var fileCollection = new InMemoryFileCollection();
    var logger = new LoggerMock();
    var options = new KeyValueDBOptions
    {
        FileCollection = fileCollection,
        Compression = new NoCompressionStrategy(),
        CompactorScheduler = null,
        CompactorWriteBytesPerSecondLimit = 20000,
        FileSplitSize = 60000,
        Logger = logger
    };
    using var db = NewKeyValueDB(options);
    using (var tr = db.StartTransaction())
    {
        var keyBuffer = new byte[100];
        for (var value = 0; value < 256; value++)
        {
            // Fill the whole buffer with one byte value so each key is distinct.
            for (var pos = 0; pos < 100; pos++)
            {
                keyBuffer[pos] = (byte)value;
            }
            tr.CreateOrUpdateKeyValue(keyBuffer, keyBuffer);
        }
        tr.Commit();
    }
    db.Compact(CancellationToken.None);
    // Kvi size = 27640 => ~1.4s
    Assert.InRange(logger.KviTime.TotalMilliseconds, 1000, 2000);
}
public void CanRollbackToStartIfNoTrlMissing()
{
    using var fileCollection = new InMemoryFileCollection();
    using (var kv = new KeyValueDB(new KeyValueDBOptions
           {
               FileCollection = fileCollection,
               FileSplitSize = 1024,
               Compression = new NoCompressionStrategy()
           }))
    {
        for (var n = 1; n < 60; n++)
        {
            using (var tr = kv.StartTransaction())
            {
                var keyBytes = new byte[4];
                BTDB.Buffer.PackUnpack.PackInt32BE(keyBytes, 0, n);
                tr.CreateOrUpdateKeyValueUnsafe(keyBytes, new byte[200]);
                tr.SetCommitUlong((ulong)n);
                tr.Commit();
            }
            if (n % 5 == 0)
            {
                kv.Compact(new System.Threading.CancellationToken());
            }
            if (n == 50)
            {
                kv.PreserveHistoryUpToCommitUlong = (ulong)n;
            }
        }
    }
    using (var kv = new KeyValueDB(new KeyValueDBOptions
           {
               FileCollection = fileCollection,
               FileSplitSize = 1024,
               OpenUpToCommitUlong = 0,
               Compression = new NoCompressionStrategy()
           }))
    using (var tr = kv.StartTransaction())
    {
        // Rolled back all the way to the start: no keys visible.
        Assert.Equal(0, tr.GetKeyValueCount());
    }
    // Again after open with OpenUpToCommitUlong you lost option to replay old history
    using (var kv = new KeyValueDB(new KeyValueDBOptions
           {
               FileCollection = fileCollection,
               FileSplitSize = 1024,
               OpenUpToCommitUlong = 1,
               Compression = new NoCompressionStrategy()
           }))
    using (var tr = kv.StartTransaction())
    {
        Assert.Equal(0, tr.GetKeyValueCount());
    }
}
public BenchmarkRelationPartialView()
{
    _fc = new InMemoryFileCollection();
    var lowDb = new KeyValueDB(_fc);
    _db = new ObjectDB();
    _db.Open(lowDb, true);
    using var tr = _db.StartTransaction();
    var table = tr.GetRelation<IPersonTable>();
    // Seed 10000 parents, each with 100 child Person records.
    for (var personId = 0; personId < 10000; personId++)
    {
        var children = Enumerable.Range(0, 100)
            .Select(childIndex => new Person
            {
                ParentId = personId,
                PersonId = personId * 100 + childIndex,
                Name = "Lorem ipsum child " + childIndex,
                Age = (ulong)childIndex
            })
            .ToList();
        table.Upsert(new Person
        {
            ParentId = 1,
            PersonId = personId,
            Age = (ulong)(personId / 128),
            Name = "Lorem ipsum " + personId,
            Children = children
        });
    }
    tr.Commit();
    _tr = _db.StartReadOnlyTransaction();
    _table = _tr.GetRelation<IPersonTable>();
}
public void EmptyWritingTransaction()
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection);
    // A writing transaction with no mutations must still commit cleanly.
    using var writingTr = db.StartWritingTransaction().Result;
    writingTr.Commit();
}
public void SetKeyIndexWorks()
{
    using var fileCollection = new InMemoryFileCollection();
    using IKeyValueDB db = new KeyValueDB(fileCollection);
    const int keysCreated = 10000;
    var keyBytes = new byte[2];
    using (var tr = db.StartTransaction())
    {
        for (var n = 0; n < keysCreated; n++)
        {
            keyBytes[0] = (byte)(n / 256);
            keyBytes[1] = (byte)(n % 256);
            tr.CreateKey(keyBytes);
        }
        tr.Commit();
    }
    using (var tr = db.StartTransaction())
    {
        // Index equal to the count is one past the last valid key.
        Assert.False(tr.SetKeyIndex(keysCreated));
        for (var n = 0; n < keysCreated; n += 5)
        {
            Assert.True(tr.SetKeyIndex(n));
            keyBytes = tr.GetKeyAsByteArray();
            Assert.Equal((byte)(n / 256), keyBytes[0]);
            Assert.Equal((byte)(n % 256), keyBytes[1]);
            Assert.Equal(n, tr.GetKeyIndex());
        }
    }
}
public void AddingContinueToNewFileAfterReopenWithCorruption()
{
    using var fileCollection = new InMemoryFileCollection();
    using (IKeyValueDB db = new KeyValueDB(fileCollection))
    using (var tr = db.StartTransaction())
    {
        tr.CreateOrUpdateKeyValue(_key1, _key1);
        tr.Commit();
    }
    // Truncate the log so the committed data becomes unreadable on reopen.
    fileCollection.SimulateCorruptionBySetSize(20 + 16);
    using (IKeyValueDB db = new KeyValueDB(fileCollection))
    {
        using (var tr = db.StartTransaction())
        {
            // Corrupted commit is lost; database opens empty and stays writable.
            Assert.Equal(0, tr.GetKeyValueCount());
            tr.CreateOrUpdateKeyValue(Key2, Key2);
            tr.Commit();
        }
        Console.WriteLine(db.CalcStats());
    }
    // Writing continued into a new file rather than the corrupted one.
    Assert.True(2 <= fileCollection.GetCount());
}
public void FastCleanUpOnStartRemovesUselessFiles()
{
    using var fileCollection = new InMemoryFileCollection();
    using (var db = new KeyValueDB(fileCollection, new NoCompressionStrategy(), 1024))
    {
        using (var tr = db.StartTransaction())
        {
            tr.CreateOrUpdateKeyValue(_key1, new byte[1024]);
            tr.CreateOrUpdateKeyValue(Key2, new byte[1024]);
            tr.Commit();
        }
        using (var tr = db.StartTransaction())
        {
            tr.EraseAll();
            tr.Commit();
        }
        Assert.Equal(3u, fileCollection.GetCount()); // 3 Logs
    }
    using (var db = new KeyValueDB(fileCollection, new NoCompressionStrategy(), 1024))
    {
        Console.WriteLine(db.CalcStats());
        // Reopen cleans up logs made useless by the EraseAll above.
        Assert.Equal(2u, fileCollection.GetCount()); // 1 Log, 1 KeyIndex
    }
}
public void CreateEmptyDatabase()
{
    // Opening and disposing a database over an empty collection must not throw.
    using var fileCollection = new InMemoryFileCollection();
    using var db = new KeyValueDB(fileCollection);
}
public void AddingContinueToSameFileAfterReopenOfDBWith2TransactionLogFiles()
{
    using var fileCollection = new InMemoryFileCollection();
    using (IKeyValueDB db = new KeyValueDB(fileCollection, new NoCompressionStrategy(), 1024))
    using (var tr = db.StartTransaction())
    {
        tr.CreateOrUpdateKeyValue(_key1, new byte[1024]);
        tr.CreateOrUpdateKeyValue(Key2, new byte[10]);
        tr.Commit();
    }
    Assert.Equal(2u, fileCollection.GetCount());
    using (IKeyValueDB db = new KeyValueDB(fileCollection, new NoCompressionStrategy(), 1024))
    using (var tr = db.StartTransaction())
    {
        tr.CreateOrUpdateKeyValue(Key2, new byte[1024]);
        tr.CreateOrUpdateKeyValue(_key3, new byte[10]);
        tr.Commit();
    }
    Assert.Equal(4u, fileCollection.GetCount());
    using (IKeyValueDB db = new KeyValueDB(fileCollection, new NoCompressionStrategy(), 1024))
    using (var tr = db.StartTransaction())
    {
        tr.CreateOrUpdateKeyValue(Key2, Key2);
        tr.Commit();
    }
    // The small final write fits the last log file, so no new file appears.
    Assert.Equal(4u, fileCollection.GetCount());
}
public void PreapprovedCommitAndCompaction()
{
    using var fileCollection = new InMemoryFileCollection();
    using (var db = new KeyValueDB(fileCollection, new NoCompressionStrategy(), 1024))
    {
        using (var tr = db.StartWritingTransaction().Result)
        {
            tr.CreateOrUpdateKeyValue(_key1, new byte[1024]);
            tr.CreateOrUpdateKeyValue(Key2, new byte[10]);
            tr.Commit();
        }
        db.Compact(new CancellationToken());
        using (var tr = db.StartWritingTransaction().Result)
        {
            // Erase the first key so the next compaction has garbage to reclaim.
            tr.EraseRange(0, 0);
            tr.Commit();
        }
        db.Compact(new CancellationToken());
        // A second instance over the same files must see the post-compaction state.
        using var db2 = new KeyValueDB(fileCollection, new NoCompressionStrategy(), 1024);
        using var verifyTr = db2.StartTransaction();
        Assert.False(verifyTr.FindExactKey(_key1));
        Assert.True(verifyTr.FindExactKey(Key2));
    }
}
public void CompactionStabilizedEvenWithOldTransactions()
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection, new NoCompressionStrategy(), 10240, null);
    using (var tr = db.StartTransaction())
    {
        tr.CreateOrUpdateKeyValue(_key1, new byte[4000]);
        tr.CreateOrUpdateKeyValue(Key2, new byte[4000]);
        tr.Commit();
    }
    using (var tr = db.StartTransaction())
    {
        tr.CreateOrUpdateKeyValue(_key3, new byte[4000]); // creates new Log
        tr.FindExactKey(_key1);
        tr.EraseCurrent();
        tr.Commit();
    }
    // An open transaction pins the older state; repeated compaction must
    // stabilize at the same file count instead of churning.
    var pinningTr = db.StartTransaction();
    db.Compact(new CancellationToken());
    Assert.Equal(4u, fileCollection.GetCount()); // 2 Logs, 1 values, 1 KeyIndex
    db.Compact(new CancellationToken());
    Assert.Equal(4u, fileCollection.GetCount()); // 2 Logs, 1 values, 1 KeyIndex
    pinningTr.Dispose();
    // Once the pin is released compaction can drop the extra log.
    db.Compact(new CancellationToken());
    Assert.Equal(3u, fileCollection.GetCount()); // 1 Log, 1 values, 1 KeyIndex
}
public void StartWritingTransactionWorks()
{
    using var fileCollection = new InMemoryFileCollection();
    using IKeyValueDB db = new KeyValueDB(fileCollection);
    var tr1 = db.StartWritingTransaction().Result;
    // Queue a second writing transaction; it can only start once tr1 finishes.
    var tr2Task = db.StartWritingTransaction();
    var task = Task.Factory.StartNew(() =>
    {
        var tr2 = tr2Task.Result;
        // tr2 must observe tr1's committed key.
        Assert.True(tr2.FindExactKey(_key1));
        tr2.CreateKey(Key2);
        tr2.Commit();
        tr2.Dispose();
    });
    tr1.CreateKey(_key1);
    tr1.Commit();
    tr1.Dispose();
    // BUG FIX: the bool result of Wait was previously ignored, so a timed-out
    // (stuck or deadlocked) second transaction let the test fall through and
    // its unobserved assertion failures could be lost. Assert completion.
    Assert.True(task.Wait(1000));
    using (var tr = db.StartTransaction())
    {
        Assert.True(tr.FindExactKey(_key1));
        Assert.True(tr.FindExactKey(Key2));
    }
}
public void CreateEmptyCache()
{
    // Constructing and disposing a cache over an empty collection must not throw.
    using var fileCollection = new InMemoryFileCollection();
    using var cache = new DiskChunkCache(fileCollection, 20, 1000);
}
public void MultipleTransactions2(int transactionCount)
{
    using var fileCollection = new InMemoryFileCollection();
    using IKeyValueDB db = new KeyValueDB(fileCollection);
    var keyBytes = new byte[2 + transactionCount * 10];
    for (var i = 0; i < transactionCount; i++)
    {
        // Keys are created in descending 2-byte-prefix order with growing lengths.
        keyBytes[0] = (byte)((transactionCount - i) / 256);
        keyBytes[1] = (byte)((transactionCount - i) % 256);
        using (var tr = db.StartTransaction())
        {
            tr.CreateOrUpdateKeyValue(ByteBuffer.NewSync(keyBytes, 0, 2 + i * 10), ByteBuffer.NewEmpty());
            // Periodically verify all earlier keys are still findable.
            if (i % 100 == 0 || i == transactionCount - 1)
            {
                for (var j = 0; j < i; j++)
                {
                    keyBytes[0] = (byte)((transactionCount - j) / 256);
                    keyBytes[1] = (byte)((transactionCount - j) % 256);
                    Assert.Equal(FindResult.Exact, tr.Find(ByteBuffer.NewSync(keyBytes, 0, 2 + j * 10)));
                }
            }
            tr.Commit();
        }
    }
}
public async Task GettingContentMakesItStayLongerDecreasingRate()
{
    using var fileCollection = new InMemoryFileCollection();
    const int cacheCapacity = 50000;
    using var cache = new DiskChunkCache(fileCollection, 20, cacheCapacity);
    for (var item = 0; item < 80; item++)
    {
        Put(cache, item);
        // Earlier items are read more often, so eviction should keep them.
        for (var reads = 0; reads < 79 - item; reads++)
        {
            Get(cache, item);
        }
        if (CalcLength(fileCollection) <= cacheCapacity)
        {
            continue;
        }
        // Over capacity: let the compaction task bring it back under the limit.
        await FinishCompactTask(cache);
        Assert.True(CalcLength(fileCollection) <= cacheCapacity);
    }
    _output.WriteLine(cache.CalcStats());
    Assert.True(Get(cache, 0));
    Assert.False(Get(cache, 60));
}
public void AccessEveryTenthTenTimesMoreMakesItStay()
{
    using var fileCollection = new InMemoryFileCollection();
    const int cacheCapacity = 50000;
    using var cache = new DiskChunkCache(fileCollection, 20, cacheCapacity);
    for (var item = 0; item < 46; item++)
    {
        Put(cache, item);
        // NOTE(review): despite the method name ("every tenth"), every 5th item
        // (item % 5 == 0) gets the extra accesses — confirm name/condition intent.
        var accessCount = item % 5 == 0 ? 10 + item : 1;
        for (var r = 0; r < accessCount; r++)
        {
            Get(cache, item);
        }
        // Presumably gives background eviction time to run near capacity — TODO confirm.
        if (item == 42)
        {
            Thread.Sleep(500);
        }
        Assert.True(fileCollection.Enumerate().Sum(f => (long)f.GetSize()) <= cacheCapacity);
    }
    _output.WriteLine(cache.CalcStats());
    Assert.True(Get(cache, 0));
    Assert.False(Get(cache, 1));
}
public void AddingContinueToSameFileAfterReopen()
{
    using var fileCollection = new InMemoryFileCollection();
    using (var db = NewKeyValueDB(fileCollection))
    using (var tr = db.StartTransaction())
    {
        tr.CreateOrUpdateKeyValue(_key1, _key1);
        tr.Commit();
    }
    using (var db = NewKeyValueDB(fileCollection))
    {
        using (var tr = db.StartTransaction())
        {
            tr.CreateOrUpdateKeyValue(Key2, Key2);
            tr.Commit();
        }
        _testOutputHelper.WriteLine(db.CalcStats());
    }
    // Reopening appended to the existing transaction log; no extra file created.
    Assert.Equal(1u, fileCollection.GetCount()); // Log
}
public void ReadOnlyTransactionThrowsOnWriteAccess()
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection);
    using var readOnlyTr = db.StartReadOnlyTransaction();
    // Any mutation attempt through a read-only transaction must be rejected.
    Assert.Throws<BTDBTransactionRetryException>(() => readOnlyTr.CreateKey(new byte[1]));
}
public void VeryLongKeys()
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection);
    // 200000-byte keys that differ only at offset 100000.
    var keyBytes = new byte[200000];
    var valueBytes = new byte[100];
    using (var tr = db.StartTransaction())
    {
        for (byte marker = 0; marker < 250; marker++)
        {
            keyBytes[100000] = marker;
            tr.CreateOrUpdateKeyValue(keyBytes, valueBytes);
        }
        tr.Commit();
    }
    using (var tr = db.StartTransaction())
    {
        // NOTE(review): this transaction is never committed, so the erases
        // below are discarded on dispose — confirm this is intentional.
        for (byte marker = 0; marker < 250; marker++)
        {
            keyBytes[100000] = marker;
            Assert.True(tr.FindExactKey(keyBytes));
            tr.EraseCurrent();
        }
    }
}
public void BiggerKey(int prefixLength, int offsetKey, int keyLength)
{
    var prefix = new byte[prefixLength];
    var backing = new byte[offsetKey + keyLength];
    for (var pos = offsetKey; pos < offsetKey + keyLength; pos++)
    {
        backing[pos] = (byte)pos;
    }
    var key = ByteBuffer.NewAsync(backing, offsetKey, keyLength);
    using var fileCollection = new InMemoryFileCollection();
    using IKeyValueDB db = new KeyValueDB(fileCollection);
    using (var writer = db.StartTransaction())
    {
        writer.SetKeyPrefix(prefix);
        writer.CreateOrUpdateKeyValue(key, ByteBuffer.NewEmpty());
        writer.Commit();
    }
    using (var reader = db.StartTransaction())
    {
        // The key must round-trip under the same prefix regardless of buffer offset.
        reader.SetKeyPrefix(prefix);
        Assert.True(reader.FindExactKey(key.ToByteArray()));
        Assert.Equal(key.ToByteArray(), reader.GetKeyAsByteArray());
    }
}
public void SetKeyPrefixInOneTransaction()
{
    using var fileCollection = new InMemoryFileCollection();
    using IKeyValueDB db = new KeyValueDB(fileCollection);
    var keyBytes = new byte[5];
    var valueBytes = new byte[100];
    var rnd = new Random();
    using (var tr = db.StartTransaction())
    {
        // 100 prefixes (first byte) x 100 keys (last byte); bytes 1..3 stay zero.
        for (byte prefixByte = 0; prefixByte < 100; prefixByte++)
        {
            keyBytes[0] = prefixByte;
            for (byte suffixByte = 0; suffixByte < 100; suffixByte++)
            {
                keyBytes[4] = suffixByte;
                rnd.NextBytes(valueBytes);
                tr.CreateOrUpdateKeyValue(keyBytes, valueBytes);
            }
        }
        tr.Commit();
    }
    using (var tr = db.StartTransaction())
    {
        // Each 4-byte prefix must scope the count to exactly its 100 keys.
        for (byte prefixByte = 0; prefixByte < 100; prefixByte++)
        {
            keyBytes[0] = prefixByte;
            tr.SetKeyPrefix(ByteBuffer.NewSync(keyBytes, 0, 4));
            Assert.Equal(100, tr.GetKeyValueCount());
        }
    }
}
public void TwoTransactions()
{
    using var fileCollection = new InMemoryFileCollection();
    using IKeyValueDB db = new KeyValueDB(fileCollection);
    using (var first = db.StartTransaction())
    {
        first.CreateKey(_key1);
        first.Commit();
    }
    using (var second = db.StartTransaction())
    {
        // The second transaction sees the first commit plus its own new key.
        second.CreateKey(Key2);
        Assert.True(second.FindExactKey(_key1));
        Assert.True(second.FindExactKey(Key2));
        Assert.False(second.FindExactKey(_key3));
        second.Commit();
    }
    using (var third = db.StartTransaction())
    {
        Assert.True(third.FindExactKey(_key1));
        Assert.True(third.FindExactKey(Key2));
        Assert.False(third.FindExactKey(_key3));
    }
}
public void MultipleTransactions2(int transactionCount)
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection);
    var keyBytes = new byte[2 + transactionCount * 10];
    for (var i = 0; i < transactionCount; i++)
    {
        // Keys use a descending 2-byte prefix with growing key lengths.
        keyBytes[0] = (byte)((transactionCount - i) / 256);
        keyBytes[1] = (byte)((transactionCount - i) % 256);
        using var tr = db.StartTransaction();
        tr.CreateOrUpdateKeyValue(keyBytes.AsSpan(0, 2 + i * 10), ReadOnlySpan<byte>.Empty);
        // Periodically verify every previously created key is still findable.
        if (i % 100 == 0 || i == transactionCount - 1)
        {
            for (var j = 0; j < i; j++)
            {
                keyBytes[0] = (byte)((transactionCount - j) / 256);
                keyBytes[1] = (byte)((transactionCount - j) % 256);
                Assert.Equal(FindResult.Exact, tr.Find(keyBytes.AsSpan(0, 2 + j * 10), 0));
            }
        }
        tr.Commit();
    }
}
public void GetFromEmptyCacheReturnsEmptyByteBuffer()
{
    using var fileCollection = new InMemoryFileCollection();
    using var cache = new DiskChunkCache(fileCollection, 20, 1000);
    // A cache miss yields an empty buffer instead of throwing.
    var missResult = cache.Get(CalcHash(new byte[] { 0 })).Result;
    Assert.Equal(0, missResult.Length);
}
public void FirstTransaction()
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection);
    using var tr = db.StartTransaction();
    // In a fresh database the key does not exist yet, so creation reports true.
    Assert.True(tr.CreateOrUpdateKeyValue(_key1, ReadOnlySpan<byte>.Empty));
    tr.Commit();
}
public void CompressibleValueLoad()
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection);
    using var tr = db.StartTransaction();
    // An all-zero value is highly compressible; it must read back losslessly.
    tr.CreateOrUpdateKeyValue(_key1, new byte[1000]);
    Assert.Equal(new byte[1000], tr.GetValue().ToArray());
    tr.Commit();
}
public void WhatIPutICanGet()
{
    using var fileCollection = new InMemoryFileCollection();
    using var cache = new DiskChunkCache(fileCollection, 20, 1000);
    // Round-trip a single one-byte chunk keyed by its content hash.
    var contentHash = CalcHash(new byte[] { 0 });
    cache.Put(contentHash, ByteBuffer.NewAsync(new byte[] { 1 }));
    Assert.Equal(new byte[] { 1 }, cache.Get(contentHash).Result.ToByteArray());
}
public void OnlyOneWritingTransactionPossible()
{
    using var fileCollection = new InMemoryFileCollection();
    using var db = NewKeyValueDB(fileCollection);
    using var writer = db.StartTransaction();
    writer.CreateKey(_key1);
    using var second = db.StartTransaction();
    // The second transaction snapshots state before the uncommitted write...
    Assert.False(second.FindExactKey(_key1));
    // ...and its own write attempt while another writer is active must retry.
    Assert.Throws<BTDBTransactionRetryException>(() => second.CreateKey(Key2));
}
public void CompactorShouldNotBePessimistDespiteRunningTransactions()
{
    using var fileCollection = new InMemoryFileCollection();
    var options = new KeyValueDBOptions
    {
        Compression = new NoCompressionStrategy(),
        FileCollection = fileCollection,
        FileSplitSize = 8096,
        CompactorScheduler = CompactorScheduler.Instance,
    };
    using var kvDb = new KeyValueDB(options);
    // Seed 100 keys with ~2000-byte values; the small split size spreads them
    // across many files.
    for (var i = 0; i < 100; i++)
    {
        using var tr = kvDb.StartWritingTransaction().Result;
        var key = new byte[4];
        BTDB.Buffer.PackUnpack.PackInt32BE(key, 0, i);
        tr.CreateOrUpdateKeyValue(key, new byte[2000]);
        tr.Commit();
    }
    kvDb.Compact(new CancellationToken());
    var fileCountAfterFirstCompaction = fileCollection.GetCount();
    // Hold a read-only transaction open while erasing every other key;
    // the compactor must still make progress despite the pinned snapshot.
    using (kvDb.StartReadOnlyTransaction())
    {
        for (var i = 0; i < 50; i++)
        {
            using var tr = kvDb.StartWritingTransaction().Result;
            var key = new byte[4];
            BTDB.Buffer.PackUnpack.PackInt32BE(key, 0, i * 2);
            tr.FindExactKey(key);
            tr.EraseCurrent();
            tr.Commit();
        }
        // Compact repeatedly until it reports no more work.
        while (kvDb.Compact(new CancellationToken()))
        {
        }
        // Compaction may add files while the snapshot is pinned, but only
        // within a bounded range above the first-compaction count.
        Assert.InRange(fileCollection.GetCount(), fileCountAfterFirstCompaction + 2,
            fileCountAfterFirstCompaction + 50);
    }
    // With the read-only transaction disposed, a few more writes plus
    // compaction must shrink the collection well below the earlier count.
    for (var i = 0; i < 4; i++)
    {
        using var tr = kvDb.StartWritingTransaction().Result;
        var key = new byte[4];
        BTDB.Buffer.PackUnpack.PackInt32BE(key, 0, i);
        tr.CreateOrUpdateKeyValue(key, new byte[2000]);
        tr.Commit();
    }
    while (kvDb.Compact(new CancellationToken()))
    {
    }
    Assert.InRange(fileCollection.GetCount(), fileCountAfterFirstCompaction / 3,
        2 * fileCountAfterFirstCompaction / 3);
}
public void EmptyWritingTransaction()
{
    using var fileCollection = new InMemoryFileCollection();
    using IKeyValueDB db = new KeyValueDB(fileCollection);
    // Committing a writing transaction that made no changes must succeed.
    using var tr = db.StartWritingTransaction().Result;
    tr.Commit();
}