// Replays the deterministic key sequence of every writer thread on a single
// thread, reading each key back from a read-only copy of the database.
private static async Task CheckGetKeysSingleThread(string dbFolder)
{
    var options = new TrimDatabaseOptions() { DatabaseFolder = dbFolder, OpenReadOnly = true };
    await using var database = new TrimDatabase(options);
    await database.LoadAsync();

    var threadCount = Environment.ProcessorCount;
    var seed = 7722;
    var keyValueGenerator = new KeyValueGenerator(threadCount, seed);
    var keyBuffer = new byte[10];
    var valueBuffer = new byte[100];

    for (var threadId = 0; threadId < threadCount; threadId++)
    {
        for (var iteration = 0; iteration < _keysPerThread; iteration++)
        {
            //if (iteration == 7) Debugger.Break();
            keyValueGenerator.GetKeyValue(keyBuffer, valueBuffer, (short)threadId, iteration);
            Console.WriteLine($"Thread Id {threadId} iteration {iteration}");
            await database.GetAsync(keyBuffer);
        }
    }
}
/// <summary>
/// Benchmark global setup: resets the output folder, opens every table file
/// found in the source stress folder, and builds the storage layer and block
/// cache that the benchmark methods operate on.
/// </summary>
public void GlobalSetup()
{
    // Start from a clean output folder so earlier benchmark runs can't interfere.
    if (Directory.Exists(_outputFolder))
    {
        Directory.Delete(_outputFolder, true);
    }
    Directory.CreateDirectory(_outputFolder);

    var dbFolder = "D:\\stress";
    var dbOptions = new TrimDatabaseOptions();
    _cache = dbOptions.BlockCache();

    var filenames = Directory.GetFiles(dbFolder);
    _tableFiles = new TableFile[filenames.Length];
    _storageLayer = new SortedStorageLayer(5, _outputFolder, _cache, 10 * 1024 * 1024, 100);

    for (var i = 0; i < _tableFiles.Length; i++)
    {
        var tf = new TableFile(filenames[i], _cache);
        // Setup has to be synchronous, so block here — but use
        // GetAwaiter().GetResult() instead of Wait() so a load failure
        // surfaces as the original exception rather than an AggregateException.
        tf.LoadAsync().GetAwaiter().GetResult();
        _tableFiles[i] = tf;
    }
}
// Scratch invocations from earlier experiments, kept for quick re-enabling:
//Directory.Delete(dbFolder, true);
//Directory.CreateDirectory(dbFolder);
//foreach(var f in Directory.GetFiles("D:\\stressbak", "*.*"))
//{
//    File.Copy(f, Path.Combine(dbFolder, Path.GetFileName(f)));
//}
//var sw = Stopwatch.StartNew();
//await WriteDB(dbFolder, disableMerging: false);
////await MergeTest(dbFolder);
//sw.Stop();
//Console.WriteLine($"Total time taken was {sw.ElapsedMilliseconds}ms");
//await CheckGetKeys(dbFolder);
//await CheckGetKeysSingleThread(dbFolder);
//await CheckLayer(dbFolder, 2);
//await SpeedTestSingleThreadedSearchFile();

// Opens the database read/write over an existing on-disk dataset and loads it.
// NOTE(review): presumably LoadAsync kicks off the background merge this is
// meant to exercise — confirm against TrimDatabase.LoadAsync.
private static async Task MergeTest(string dbFolder)
{
    var dbOptions = new TrimDatabaseOptions() { DatabaseFolder = dbFolder };
    await using var db = new TrimDatabase(dbOptions);
    await db.LoadAsync();
}
// Writes _keysPerThread deterministic key/value pairs from each of
// ProcessorCount parallel worker tasks, then reports throughput figures.
private static async Task WriteDB(string dbFolder, bool disableMerging)
{
    var options = new TrimDatabaseOptions() { DatabaseFolder = dbFolder, DisableMerging = disableMerging };
    await using var database = new TrimDatabase(options);
    await database.LoadAsync();

    var workerCount = Environment.ProcessorCount;
    var seed = 7722;
    var generator = new KeyValueGenerator(workerCount, seed);
    var writers = new Task[workerCount];

    var stopwatch = Stopwatch.StartNew();
    for (var workerId = 0; workerId < workerCount; workerId++)
    {
        writers[workerId] = WriteToDB((short)workerId, generator, _keysPerThread, database);
    }
    await Task.WhenAll(writers);
    stopwatch.Stop();

    Console.WriteLine($"Total time taken {stopwatch.ElapsedMilliseconds}ms");
    Console.WriteLine($"Total number of keys written {_keysPerThread * workerCount}");
    var totalBytes = _keysPerThread * workerCount * (_keySize + _valueSize);
    Console.WriteLine($"Total data set {totalBytes / 1024 / 1024}mb");
    Console.WriteLine("Waiting for db to shutdown");
    //for (var i = 0; i < 10; i++)
    //{
    //    Console.WriteLine("Waiting for the merges to finish BRB");
    //    await Task.Delay(TimeSpan.FromSeconds(10));
    //}
}
public async Task TestSkipListOverflow() { var loadedWords = CommonData.Words; var folder = "D:\\Database"; foreach (var f in System.IO.Directory.GetFiles(folder)) { System.IO.File.Delete(f); } using var blocks = new MMapBlockCache(); var dbOptions = new TrimDatabaseOptions() { DatabaseFolder = folder }; var db = new TrimDatabase(dbOptions); await db.LoadAsync(); foreach (var word in loadedWords) { var utf8 = Encoding.UTF8.GetBytes(word); var value = Encoding.UTF8.GetBytes($"VALUE={word}"); await db.PutAsync(utf8, value); } var key = Encoding.UTF8.GetBytes(loadedWords[0]); var expectedValue = Encoding.UTF8.GetBytes($"VALUE={loadedWords[0]}"); var result = await db.GetAsync(key); Assert.Equal(expectedValue.ToArray(), result.ToArray()); key = Encoding.UTF8.GetBytes(loadedWords[loadedWords.Length / 2]); expectedValue = Encoding.UTF8.GetBytes($"VALUE={loadedWords[loadedWords.Length / 2]}"); result = await db.GetAsync(key); Assert.Equal(expectedValue.ToArray(), result.ToArray()); key = Encoding.UTF8.GetBytes(loadedWords[^ 1]);
/// <summary>
/// Entry point: hands the command-line arguments to the BenchmarkDotNet
/// switcher for this assembly. The manual table-file merge experiment that
/// followed the return was unreachable (CS0162) and could never run; it is
/// kept below behind comments, consistent with the other scaffolding in this
/// file, so it can be re-enabled deliberately.
/// </summary>
static async Task Main(string[] args)
{
    //var merge = new MergeBenchmark();
    //merge.GlobalSetup();
    //await merge.MergeFiles();
    //return;
    var summary = BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args);
    return;

    // Manual merge experiment — was dead code after the return above:
    //var dbFolder = "D:\\stress";
    //var outputFolder = "D:\\stressoutput";
    //Directory.Delete(outputFolder, true);
    //Directory.CreateDirectory(outputFolder);
    //var dbOptions = new TrimDatabaseOptions();
    //using var blockstore = dbOptions.BlockCache();
    //var filenames = Directory.GetFiles(dbFolder);
    //var files = new TableFile[filenames.Length];
    //for (var i = 0; i < files.Length; i++)
    //{
    //    var tf = new TableFile(filenames[i], blockstore);
    //    await tf.LoadAsync();
    //    files[i] = tf;
    //}
    //var merger = new TableFileMerger(files.Select(f => f.GetAsyncEnumerator()).ToArray());
    //var storageLayer = new SortedStorageLayer(5, dbFolder, blockstore, 10 * 1024 * 1024, 100);
    //var mWriter = new TableFileMergeWriter(storageLayer, blockstore);
    //await mWriter.WriteFromMerger(merger);
}
// Reads back every written key using one reader task per processor against a
// read-only database, then reports total and per-key timings and miss count.
private static async Task CheckGetKeys(string dbFolder)
{
    var options = new TrimDatabaseOptions() { DatabaseFolder = dbFolder, OpenReadOnly = true };
    await using var database = new TrimDatabase(options);
    await database.LoadAsync();

    var readerCount = Environment.ProcessorCount;
    var seed = 7722;
    var generator = new KeyValueGenerator(readerCount, seed);
    var readers = new Task[readerCount];

    Console.WriteLine("Starting the get test");
    var stopwatch = Stopwatch.StartNew();
    for (var readerId = 0; readerId < readerCount; readerId++)
    {
        readers[readerId] = ReadFromDB((short)readerId, generator, _keysPerThread, database);
    }
    await Task.WhenAll(readers);
    stopwatch.Stop();

    Console.WriteLine($"Total time taken {stopwatch.ElapsedMilliseconds}ms");
    var timePerKey = (double)stopwatch.ElapsedMilliseconds / (_keysPerThread * readerCount);
    Console.WriteLine($"Time taken per key {timePerKey * 1000.0}µs");
    Console.WriteLine($"Total misses {_numberOfMisses}");
}