public T Run<T>(BenchmarkOptions<T> options)
{
    const int batchAmounts = 32;
    var batchSize = options.BatchSize;
    var batchRunResults = new List<T>();
    do
    {
        // TODO: maybe it's better to grow batchAmounts by a factor of 2-3 at each iteration - this can help parallelization
        // TODO: maybe it's better to increase batchSize at each iteration and merge previous batches - this can reduce overall memory usage
        batchRunResults.AddRange(
            ParallelEnumerable.Range(0, batchAmounts)
                              .Select(_ => RunOnce(options, batchSize)));
    } while (!IsGoodEnough(options, batchRunResults));
    Console.WriteLine($"Total runs: {batchRunResults.Count * batchSize}");
    return batchRunResults.Aggregate(options.Combine);
}
private static bool IsGoodEnough<T>(BenchmarkOptions<T> options, List<T> batchRunResults)
{
    return options.QuantifiedValues.All(valueOptions => IsGoodEnough(valueOptions, batchRunResults));
}
private static T RunOnce<T>(BenchmarkOptions<T> options, int runs)
{
    // Executes a single batch of `runs` measurements in parallel and folds
    // the results into one value, starting from Seed and merging with Combine.
    return ParallelEnumerable.Range(0, runs)
                             .Select(_ => options.RunOnce())
                             .Aggregate(options.Seed, options.Combine);
}
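
// A hedged usage sketch, not part of the original source: BenchmarkOptions<T>
// is assumed to expose RunOnce (the measured action), Seed and Combine (the
// fold over partial results), BatchSize, and QuantifiedValues (the stopping
// criteria checked by IsGoodEnough). The enclosing class name and the
// MeasureElapsedTicks helper below are hypothetical, chosen for illustration.
//
//   var options = new BenchmarkOptions<long>
//   {
//       Seed = 0L,
//       Combine = (a, b) => a + b,               // merge two partial results
//       RunOnce = () => MeasureElapsedTicks(),   // hypothetical measured action
//       BatchSize = 1000
//       // QuantifiedValues assumed configured with the desired precision targets
//   };
//   long totalTicks = new Benchmark().Run(options);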