private static async Task SpeedTestSingleThreadedSearchFile()
{
    var tempPath = System.IO.Path.GetTempPath();
    var fileName = System.IO.Path.Combine(tempPath, "Level1_1.trim");
    var loadedWords = await System.IO.File.ReadAllLinesAsync("words.txt");

    using (var blockCache = new ProtoBlockCache(10000))
    {
        var loadedTable = new TableFile(fileName, blockCache);
        await loadedTable.LoadAsync();

        // Check we can get the values back out, timing the lookups
        var hash = new MurmurHash3();
        var sw = Stopwatch.StartNew();
        foreach (var word in loadedWords)
        {
            var utf8 = Encoding.UTF8.GetBytes(word);
            var h = hash.ComputeHash64(utf8);
            var result = await loadedTable.GetAsync(utf8, h);
            // Decode the value so the read is actually materialized
            var resultAsString = Encoding.UTF8.GetString(result.Value.Span);
        }
        sw.Stop();
        Console.WriteLine($"Total time taken {sw.ElapsedMilliseconds}ms, time per key {(double)sw.ElapsedMilliseconds / loadedWords.Length}ms");
    }
}
public void GlobalSetup()
{
    if (Directory.Exists(_outputFolder))
    {
        Directory.Delete(_outputFolder, true);
    }
    Directory.CreateDirectory(_outputFolder);

    var dbFolder = "D:\\stress";
    var dbOptions = new TrimDatabaseOptions();
    _cache = dbOptions.BlockCache();

    var filenames = Directory.GetFiles(dbFolder);
    _tableFiles = new TableFile[filenames.Length];
    _storageLayer = new SortedStorageLayer(5, _outputFolder, _cache, 10 * 1024 * 1024, 100);
    for (var i = 0; i < _tableFiles.Length; i++)
    {
        var tf = new TableFile(filenames[i], _cache);
        tf.LoadAsync().Wait();
        _tableFiles[i] = tf;
    }
}
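// A hedged sketch of the benchmark method this setup appears to feed. The commented-out
// MergeBenchmark calls in Main below suggest a MergeFiles method exists; its body here is
// an assumption modelled on the manual merge flow in Main, not confirmed source.
[Benchmark]
public async Task MergeFiles()
{
    // Merge every loaded table file into the output storage layer.
    var merger = new TableFileMerger(_tableFiles.Select(f => f.GetAsyncEnumerator()).ToArray());
    var writer = new TableFileMergeWriter(_storageLayer, _cache);
    await writer.WriteFromMerger(merger);
}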
public void GlobalSetup()
{
    _cache = new ProtoBlockCache(10);
    var tempPath = System.IO.Path.GetTempPath();
    var fileName = System.IO.Path.Combine(tempPath, "Level1_1.trim");
    _file = new TableFile(fileName, _cache);
    _file.LoadAsync().Wait();
}
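// Hedged sketch of a point-lookup benchmark this setup could drive; the [Benchmark]
// attribute, the method name, and the probe key are illustrative assumptions only.
[Benchmark]
public async Task<SearchResult> GetSingleKey()
{
    // In a real benchmark the key bytes and hash would be precomputed in GlobalSetup.
    var key = Encoding.UTF8.GetBytes("aardvark");
    var h = new MurmurHash3().ComputeHash64(key);
    var result = await _file.GetAsync(key, h);
    return result.Result;
}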
public async Task WriteAndReadFile()
{
    using var allocator = new NativeAllocator64(4096 * 10_000, 25);
    var memoryTable = new SkipList64(allocator);
    var loadedWords = CommonData.Words;

    foreach (var word in loadedWords)
    {
        if (string.IsNullOrEmpty(word))
        {
            continue;
        }
        var utf8 = Encoding.UTF8.GetBytes(word);
        var value = Encoding.UTF8.GetBytes($"VALUE={word}");
        memoryTable.Put(utf8, value);
    }

    var tempPath = System.IO.Path.GetTempPath();
    var fileName = System.IO.Path.Combine(tempPath, "Level1_1.trim");
    System.IO.File.Delete(fileName);

    var fw = new TableFileWriter(fileName);
    await fw.SaveMemoryTable(memoryTable);

    using (var blockCache = new MMapBlockCache())
    {
        var loadedTable = new TableFile(fileName, blockCache);
        await loadedTable.LoadAsync();

        // Check we can get the values back out
        var hash = new MurmurHash3();
        foreach (var word in loadedWords)
        {
            var utf8 = Encoding.UTF8.GetBytes(word);
            var value = Encoding.UTF8.GetBytes($"VALUE={word}");
            var h = hash.ComputeHash64(utf8);
            var result = await loadedTable.GetAsync(utf8, h);
            Assert.Equal(SearchResult.Found, result.Result);
            Assert.Equal(value, result.Value.ToArray());
        }
    }

    System.IO.File.Delete(fileName);
}
public async Task CheckTableIteratorWorks()
{
    using var allocator = new NativeAllocator32(4096 * 10_000, 25);
    var memoryTable = new SkipList32(allocator);
    var loadedWords = CommonData.Words;

    foreach (var word in loadedWords)
    {
        if (string.IsNullOrEmpty(word))
        {
            continue;
        }
        var utf8 = Encoding.UTF8.GetBytes(word);
        var value = Encoding.UTF8.GetBytes($"VALUE={word}");
        memoryTable.Put(utf8, value);
    }

    var tempPath = System.IO.Path.GetTempPath();
    var fileName = System.IO.Path.Combine(tempPath, "Level2_2.trim");
    System.IO.File.Delete(fileName);

    var fw = new TableFileWriter(fileName);
    await fw.SaveMemoryTable(memoryTable);

    using (var blockCache = new MMapBlockCache())
    {
        var loadedTable = new TableFile(fileName, blockCache);
        await loadedTable.LoadAsync();

        var count = 0;
        await foreach (var item in loadedTable)
        {
            count++;
            var key = Encoding.UTF8.GetString(item.Key);
            var value = Encoding.UTF8.GetString(item.Value);
            Assert.Equal($"VALUE={key}", value);
        }
        Assert.Equal(CommonData.Words.Length, count);
    }

    System.IO.File.Delete(fileName);
}
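// The two tests above assume a CommonData helper that exposes the shared word list.
// A minimal sketch, assuming the list is loaded once from the same words.txt file used
// by the speed test; the real helper may cache or source the words differently.
public static class CommonData
{
    public static string[] Words { get; } = System.IO.File.ReadAllLines("words.txt");
}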
public static async Task WriteAndReadAsyncBlockFile(string fileName, byte[][] wordSpans)
{
    using (var blockCache = new ProtoSharded(200))
    {
        var loadedTable = new TableFile(fileName, blockCache);
        await loadedTable.LoadAsync();

        // Dump the raw bytes of the first block for offline debugging
        var block = await blockCache.GetBlock(new Core.Storage.Blocks.FileIdentifier(1, 1), 0);
        using (var fs = new StreamWriter("C:\\code\\trimdb\\array.txt"))
        {
            for (var i = 0; i < block.Memory.Length; i++)
            {
                fs.Write($"{block.Memory.Span[i]},");
            }
        }

        // Check we can get the values back out, with one reader task per core
        var taskList = new Task[Environment.ProcessorCount];
        for (var i = 0; i < taskList.Length; i++)
        {
            taskList[i] = Task.Run(async () =>
            {
                // One hash instance per task in case MurmurHash3 keeps internal state
                var hash = new MurmurHash3();
                foreach (var word in wordSpans)
                {
                    var h = hash.ComputeHash64(word);
                    var result = await loadedTable.GetAsync(word, h);
                    if (result.Result != SearchResult.Found)
                    {
                        throw new InvalidOperationException("Key not found in table");
                    }
                }
            });
        }
        await Task.WhenAll(taskList);
    }
}
static async Task Main(string[] args)
{
    //var merge = new MergeBenchmark();
    //merge.GlobalSetup();
    //await merge.MergeFiles();
    //return;

    var summary = BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args);
    return;

    // NOTE: the manual merge path below is unreachable while the benchmark switcher
    // returns above; it is kept for ad-hoc runs against the stress data set.
    var dbFolder = "D:\\stress";
    var outputFolder = "D:\\stressoutput";
    Directory.Delete(outputFolder, true);
    Directory.CreateDirectory(outputFolder);

    var dbOptions = new TrimDatabaseOptions();
    using var blockstore = dbOptions.BlockCache();

    var filenames = Directory.GetFiles(dbFolder);
    var files = new TableFile[filenames.Length];
    for (var i = 0; i < files.Length; i++)
    {
        var tf = new TableFile(filenames[i], blockstore);
        await tf.LoadAsync();
        files[i] = tf;
    }

    var merger = new TableFileMerger(files.Select(f => f.GetAsyncEnumerator()).ToArray());
    var storageLayer = new SortedStorageLayer(5, dbFolder, blockstore, 10 * 1024 * 1024, 100);
    var mWriter = new TableFileMergeWriter(storageLayer, blockstore);
    await mWriter.WriteFromMerger(merger);
}