private static async Task CheckGetKeys(string dbFolder)
{
    using var blockStore = new Core.Storage.Blocks.CachePrototype.ProtoSharded(2_560);
    await using var db = new TrimDatabase(() => new SkipList32(new NativeAllocator32(4096 * 1024, 25)), blockStore, 5, dbFolder, 0);
    await db.LoadAsync(startWithoutMerges: true);

    var numberOfThreads = Environment.ProcessorCount;
    var tasks = new Task[numberOfThreads];
    var seed = 7722;
    var generator = new KeyValueGenerator(numberOfThreads, seed);

    Console.WriteLine("Starting the get test");
    var sw = Stopwatch.StartNew();
    for (var i = 0; i < numberOfThreads; i++)
    {
        tasks[i] = ReadFromDB((short)i, generator, _keysPerThread, db);
    }
    await Task.WhenAll(tasks);
    sw.Stop();

    Console.WriteLine($"Total time taken {sw.ElapsedMilliseconds}ms");
    var timePerKey = (double)sw.ElapsedMilliseconds / (_keysPerThread * numberOfThreads);
    Console.WriteLine($"Time taken per key {timePerKey * 1000.0}µs");
    Console.WriteLine($"Total misses {_numberOfMisses}");
}
private static async Task CheckGetKeysSingleThread(string dbFolder)
{
    var dbOptions = new TrimDatabaseOptions() { DatabaseFolder = dbFolder, OpenReadOnly = true };
    await using var db = new TrimDatabase(dbOptions);
    await db.LoadAsync();

    var numberOfThreads = Environment.ProcessorCount;
    var seed = 7722;
    var generator = new KeyValueGenerator(numberOfThreads, seed);

    var key = new byte[10];
    var value = new byte[100];
    for (var t = 0; t < numberOfThreads; t++)
    {
        for (var i = 0; i < _keysPerThread; i++)
        {
            //if (i == 7) Debugger.Break();
            generator.GetKeyValue(key, value, (short)t, i);
            Console.WriteLine($"Thread Id {t} iteration {i}");
            await db.GetAsync(key);
        }
    }
}
private static async Task CheckGetKeysSingleThread(string dbFolder)
{
    using var blockStore = new Core.Storage.Blocks.CachePrototype.ProtoSharded(2_560);
    await using var db = new TrimDatabase(() => new SkipList32(new NativeAllocator32(4096 * 1024, 25)), blockStore, 5, dbFolder, 0);
    await db.LoadAsync(startWithoutMerges: true);

    var numberOfThreads = Environment.ProcessorCount;
    var seed = 7722;
    var generator = new KeyValueGenerator(numberOfThreads, seed);

    var key = new byte[10];
    var value = new byte[100];
    for (var t = 0; t < numberOfThreads; t++)
    {
        for (var i = 0; i < _keysPerThread; i++)
        {
            //if (i == 7) Debugger.Break();
            generator.GetKeyValue(key, value, (short)t, i);
            Console.WriteLine($"Thread Id {t} iteration {i}");
            await db.GetAsync(key);
        }
    }
}
public IOScheduler(int maxSkiplistBacklog, UnsortedStorageLayer storageLayer, TrimDatabase database)
{
    _storageLayer = storageLayer;
    _database = database;

    // Sorted layers never trigger a merge; an unsorted layer merges once it is over ~80% full.
    _sortedStrategy = (sl) => false;
    _unsortedStrategy = (sl) => sl.NumberOfTables > (sl.MaxFilesAtLayer * 0.8);

    // Bounded channel caps how many in-memory tables can queue up waiting to be flushed.
    _channel = Channel.CreateBounded<MemoryTable>(new BoundedChannelOptions(maxSkiplistBacklog));
    _writerTask = WriteInMemoryTable();
    _mergeTask = CheckForMerge();
}
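// A minimal sketch of the producer side of the bounded channel above; ScheduleFlushAsync is a
// hypothetical name (the real enqueue method on IOScheduler may differ). WriteAsync only
// completes once the backlog has room, which is what back-pressures writers when more than
// maxSkiplistBacklog memory tables are already waiting to be written out.
public ValueTask ScheduleFlushAsync(MemoryTable table)
{
    // Awaits while the channel is at capacity, then hands the table to the background writer.
    return _channel.Writer.WriteAsync(table);
}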
//Directory.Delete(dbFolder, true);
//Directory.CreateDirectory(dbFolder);
//foreach (var f in Directory.GetFiles("D:\\stressbak", "*.*"))
//{
//    File.Copy(f, Path.Combine(dbFolder, Path.GetFileName(f)));
//}

//var sw = Stopwatch.StartNew();
//await WriteDB(dbFolder, disableMerging: false);
////await MergeTest(dbFolder);
//sw.Stop();
//Console.WriteLine($"Total time taken was {sw.ElapsedMilliseconds}ms");

//await CheckGetKeys(dbFolder);
//await CheckGetKeysSingleThread(dbFolder);
//await CheckLayer(dbFolder, 2);
//await SpeedTestSingleThreadedSearchFile();

private static async Task MergeTest(string dbFolder)
{
    var dbOptions = new TrimDatabaseOptions() { DatabaseFolder = dbFolder };
    await using var db = new TrimDatabase(dbOptions);
    await db.LoadAsync();
}
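// A minimal sketch (not part of the original file) of how the commented-out driver above can be
// wired back together: populate the database, then re-open it and verify every key reads back.
// RunStressSuiteAsync is a hypothetical helper name; WriteDB and CheckGetKeys are the methods
// defined in this class.
private static async Task RunStressSuiteAsync(string dbFolder)
{
    // WriteDB recreates the folder and writes _keysPerThread keys per core.
    await WriteDB(dbFolder, disableMerging: false);

    // CheckGetKeys re-opens the database read-only and counts any missing keys.
    await CheckGetKeys(dbFolder);
}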
private static async Task WriteDB(string dbFolder)
{
    System.IO.Directory.Delete(dbFolder, true);
    System.IO.Directory.CreateDirectory(dbFolder);

    using var blockStore = new Core.Storage.Blocks.CachePrototype.ProtoSharded(2_560);
    await using var db = new TrimDatabase(() => new SkipList32(new NativeAllocator32(4096 * 1024, 25)), blockStore, 5, dbFolder, 64 * 1024 * 1024);
    await db.LoadAsync();

    var numberOfThreads = Environment.ProcessorCount;
    var tasks = new Task[numberOfThreads];
    var seed = 7722;
    var generator = new KeyValueGenerator(numberOfThreads, seed);

    var sw = Stopwatch.StartNew();
    for (var i = 0; i < numberOfThreads; i++)
    {
        tasks[i] = WriteToDB((short)i, generator, _keysPerThread, db);
    }
    await Task.WhenAll(tasks);
    sw.Stop();

    Console.WriteLine($"Total time taken {sw.ElapsedMilliseconds}ms");
    Console.WriteLine($"Total number of keys written {_keysPerThread * numberOfThreads}");
    var totalDatasize = _keysPerThread * numberOfThreads * (_keySize + _valueSize);
    Console.WriteLine($"Total data set {totalDatasize / 1024 / 1024}mb");

    Console.WriteLine("Waiting for db to shutdown");
    for (var i = 0; i < 10; i++)
    {
        Console.WriteLine("Waiting for the merges to finish BRB");
        await Task.Delay(TimeSpan.FromSeconds(10));
    }
}
private static async Task WriteDB(string dbFolder, bool disableMerging)
{
    var dbOptions = new TrimDatabaseOptions() { DatabaseFolder = dbFolder, DisableMerging = disableMerging };
    await using var db = new TrimDatabase(dbOptions);
    await db.LoadAsync();

    var numberOfThreads = Environment.ProcessorCount;
    var tasks = new Task[numberOfThreads];
    var seed = 7722;
    var generator = new KeyValueGenerator(numberOfThreads, seed);

    var sw = Stopwatch.StartNew();
    for (var i = 0; i < numberOfThreads; i++)
    {
        tasks[i] = WriteToDB((short)i, generator, _keysPerThread, db);
    }
    await Task.WhenAll(tasks);
    sw.Stop();

    Console.WriteLine($"Total time taken {sw.ElapsedMilliseconds}ms");
    Console.WriteLine($"Total number of keys written {_keysPerThread * numberOfThreads}");
    var totalDatasize = _keysPerThread * numberOfThreads * (_keySize + _valueSize);
    Console.WriteLine($"Total data set {totalDatasize / 1024 / 1024}mb");

    Console.WriteLine("Waiting for db to shutdown");
    //for (var i = 0; i < 10; i++)
    //{
    //    Console.WriteLine("Waiting for the merges to finish BRB");
    //    await Task.Delay(TimeSpan.FromSeconds(10));
    //}
}
public async Task TestSkipListOverflow()
{
    var loadedWords = CommonData.Words;
    var folder = "D:\\Database";
    foreach (var f in System.IO.Directory.GetFiles(folder))
    {
        System.IO.File.Delete(f);
    }

    using var blocks = new MMapBlockCache();
    var dbOptions = new TrimDatabaseOptions() { DatabaseFolder = folder };
    var db = new TrimDatabase(dbOptions);
    await db.LoadAsync();

    foreach (var word in loadedWords)
    {
        var utf8 = Encoding.UTF8.GetBytes(word);
        var value = Encoding.UTF8.GetBytes($"VALUE={word}");
        await db.PutAsync(utf8, value);
    }

    // Spot-check the first, middle and last words after the skip list has overflowed to disk.
    var key = Encoding.UTF8.GetBytes(loadedWords[0]);
    var expectedValue = Encoding.UTF8.GetBytes($"VALUE={loadedWords[0]}");
    var result = await db.GetAsync(key);
    Assert.Equal(expectedValue.ToArray(), result.ToArray());

    key = Encoding.UTF8.GetBytes(loadedWords[loadedWords.Length / 2]);
    expectedValue = Encoding.UTF8.GetBytes($"VALUE={loadedWords[loadedWords.Length / 2]}");
    result = await db.GetAsync(key);
    Assert.Equal(expectedValue.ToArray(), result.ToArray());

    key = Encoding.UTF8.GetBytes(loadedWords[^1]);
    expectedValue = Encoding.UTF8.GetBytes($"VALUE={loadedWords[^1]}");
    result = await db.GetAsync(key);
    Assert.Equal(expectedValue.ToArray(), result.ToArray());
}
private static async Task CheckGetKeys(string dbFolder)
{
    var dbOptions = new TrimDatabaseOptions() { DatabaseFolder = dbFolder, OpenReadOnly = true };
    await using var db = new TrimDatabase(dbOptions);
    await db.LoadAsync();

    var numberOfThreads = Environment.ProcessorCount;
    var tasks = new Task[numberOfThreads];
    var seed = 7722;
    var generator = new KeyValueGenerator(numberOfThreads, seed);

    Console.WriteLine("Starting the get test");
    var sw = Stopwatch.StartNew();
    for (var i = 0; i < numberOfThreads; i++)
    {
        tasks[i] = ReadFromDB((short)i, generator, _keysPerThread, db);
    }
    await Task.WhenAll(tasks);
    sw.Stop();

    Console.WriteLine($"Total time taken {sw.ElapsedMilliseconds}ms");
    var timePerKey = (double)sw.ElapsedMilliseconds / (_keysPerThread * numberOfThreads);
    Console.WriteLine($"Time taken per key {timePerKey * 1000.0}µs");
    Console.WriteLine($"Total misses {_numberOfMisses}");
}
private static async Task WriteToDB(short threadId, KeyValueGenerator generator, int numberOfIterations, TrimDatabase trimDB)
{
    await Task.Yield();
    var keyMemory = new byte[_keySize];
    var valueMemory = new byte[_valueSize];
    for (var i = 0; i < numberOfIterations; i++)
    {
        // Generate the key/value pair for this thread and iteration, then write it.
        generator.GetKeyValue(keyMemory.AsSpan(), valueMemory.AsSpan(), threadId, i);
        await trimDB.PutAsync(keyMemory, valueMemory);
    }
}
private static async Task ReadFromDB(short threadId, KeyValueGenerator generator, int numberOfIterations, TrimDatabase trimDB)
{
    await Task.Yield();
    var keyMemory = new byte[_keySize];
    var valueMemory = new byte[_valueSize];
    for (var i = 0; i < numberOfIterations; i++)
    {
        // Regenerate the key written for this thread/iteration and look it up.
        generator.GetKeyValue(keyMemory.AsSpan(), valueMemory.AsSpan(), threadId, i);
        var result = await trimDB.GetAsync(keyMemory);
        if (result.IsEmpty)
        {
            Interlocked.Increment(ref _numberOfMisses);
        }
        //if ((i + 1) % 10 == 0) Console.WriteLine($"Thread {threadId} has read {i + 1} keys");
        //if (threadId == 3 && i > 228) Debugger.Break();
        //    Console.WriteLine($"ERROR Thread {threadId} has read {i + 1} keys");
    }
}
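// The write/read helpers above only line up if KeyValueGenerator is deterministic: the same
// (seed, threadId, iteration) triple must always produce the same key bytes, so ReadFromDB can
// regenerate exactly what WriteToDB stored. The real class is not shown in this file; the
// sketch below is an assumed, simplified implementation for illustration only (the type name,
// field layout, and derivation scheme are guesses, not the actual code). Requires
// System.Buffers.Binary and assumes keys are at least 6 bytes long.
public class DeterministicKeyValueGeneratorSketch
{
    private readonly int _seed;

    public DeterministicKeyValueGeneratorSketch(int numberOfThreads, int seed) => _seed = seed;

    public void GetKeyValue(Span<byte> key, Span<byte> value, short threadId, int iteration)
    {
        // Encode the identity directly so every (threadId, iteration) pair yields a unique key.
        System.Buffers.Binary.BinaryPrimitives.WriteInt16LittleEndian(key, threadId);
        System.Buffers.Binary.BinaryPrimitives.WriteInt32LittleEndian(key.Slice(2), iteration);

        // Fill the remainder of the key and the value deterministically from the seed.
        var rng = new Random(HashCode.Combine(_seed, threadId, iteration));
        rng.NextBytes(key.Slice(6));
        rng.NextBytes(value);
    }
}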
public TableFileMergeWriter(TrimDatabase database, StorageLayer layer, BlockCache blockCache, int lowestLevel)
{
    _database = database;
    _layer = layer;
    _blockCache = blockCache;
    // Note: lowestLevel is accepted but not stored by this constructor.
}