Example #1
    public T Run<T>(BenchmarkOptions<T> options)
    {
        var batchAmounts = 32;               // batches launched per loop iteration
        var batchSize    = options.BatchSize;

        var batchRunResults = new List<T>();

        while (true)
        {
            // TODO: maybe it's better to increase batchAmount * 2/3 at each iteration - this can help parallelization
            // TODO: maybe it's better to increase batchSize at each iteration + merge previous batches - this can reduce overall memory usage
            batchRunResults.AddRange(
                ParallelEnumerable.Range(0, batchAmounts)
                    .Select(_ => RunOnce(options, batchSize)));

            // Keep collecting batches until every stopping criterion is satisfied.
            if (IsGoodEnough(options, batchRunResults))
            {
                break;
            }
        }

        Console.WriteLine($"Total runs: {batchRunResults.Count * batchSize}");
        return batchRunResults.Aggregate(options.Combine);
    }
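
From the way Run, IsGoodEnough, and RunOnce use it, BenchmarkOptions<T> appears to bundle a batch size, a seed value, a combine function, the action measured by a single run, and a set of per-value stopping criteria. The sketch below is only an inferred shape plus a hypothetical call site: the QuantifiedValueOptions<T> type name, OperationUnderTest, quantifiedValueOptions, benchmarkRunner, and all concrete values are assumptions, not the actual API of this codebase.

    using System;
    using System.Collections.Generic;

    // Inferred shape: only the members the snippets on this page actually reference
    // (BatchSize, Seed, Combine, RunOnce, QuantifiedValues) are listed, and
    // QuantifiedValueOptions<T> is a placeholder name for the per-value criteria type.
    public class BenchmarkOptions<T>
    {
        public int BatchSize { get; set; }
        public T Seed { get; set; }
        public Func<T, T, T> Combine { get; set; }
        public Func<T> RunOnce { get; set; }
        public IEnumerable<QuantifiedValueOptions<T>> QuantifiedValues { get; set; }
    }

    // Hypothetical call site: time one operation per run and sum elapsed milliseconds.
    var options = new BenchmarkOptions<double>
    {
        BatchSize = 100,
        Seed      = 0.0,
        Combine   = (a, b) => a + b,
        RunOnce   = () =>
        {
            var stopwatch = System.Diagnostics.Stopwatch.StartNew();
            OperationUnderTest();                       // hypothetical workload
            return stopwatch.Elapsed.TotalMilliseconds;
        },
        QuantifiedValues = quantifiedValueOptions       // stopping criteria, defined elsewhere
    };

    double totalMilliseconds = benchmarkRunner.Run(options);  // whatever instance exposes the Run<T> above
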
Example #2
    private static bool IsGoodEnough<T>(BenchmarkOptions<T> options, List<T> batchRunResults)
    {
        // The benchmark is finished only when every quantified value meets its own stopping criterion.
        return options.QuantifiedValues.All(valueOptions => IsGoodEnough(valueOptions, batchRunResults));
    }
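
The per-value overload IsGoodEnough(valueOptions, batchRunResults) is not among the examples shown here. Below is a minimal sketch of one possible stopping criterion, assuming each QuantifiedValueOptions<T> exposes a value extractor and a target relative error; GetValue and MaxRelativeError are hypothetical names, and the confidence-interval check is just one plausible choice, not the actual implementation.

    // Hypothetical criterion: stop once the relative half-width of a 95% confidence
    // interval of the extracted metric drops below MaxRelativeError.
    // GetValue and MaxRelativeError are assumed members, not part of the code shown above.
    private static bool IsGoodEnough<T>(QuantifiedValueOptions<T> valueOptions, List<T> batchRunResults)
    {
        var samples = batchRunResults.Select(valueOptions.GetValue).ToList();
        if (samples.Count < 2)
        {
            return false;   // not enough data to estimate the variance
        }

        double mean     = samples.Average();
        double variance = samples.Sum(x => (x - mean) * (x - mean)) / (samples.Count - 1);
        double stdError = Math.Sqrt(variance / samples.Count);

        // 1.96 ~ z-score of a 95% confidence interval.
        return Math.Abs(mean) > double.Epsilon
            && 1.96 * stdError / Math.Abs(mean) <= valueOptions.MaxRelativeError;
    }
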
Example #3
    private static T RunOnce<T>(BenchmarkOptions<T> options, int runs)
    {
        // Execute one batch of `runs` measurements in parallel and fold them into a single T.
        return ParallelEnumerable.Range(0, runs)
            .Select(_ => options.RunOnce())
            .Aggregate(options.Seed, options.Combine);
    }
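
RunOnce and Run aggregate at two levels: each batch folds its individual run results starting from options.Seed, and Run then folds the per-batch results again, seeded by the first batch result. For the two-level fold to match a single flat fold over all runs, Combine should be associative (and, since PLINQ may reorder elements, commutative), with Seed acting as an identity. The self-contained illustration below uses a plain counter; every name and value in it is made up for demonstration.

    using System;
    using System.Linq;

    static class TwoLevelFoldDemo
    {
        static void Main()
        {
            Func<int, int, int> combine = (a, b) => a + b;  // associative and commutative; 0 is its identity
            int seed = 0;

            // One "batch" of 4 runs, each run contributing 1 - mirrors RunOnce<T>.
            int batchResult = ParallelEnumerable.Range(0, 4)
                .Select(_ => 1)
                .Aggregate(seed, combine);

            // Three batch results folded again - mirrors the final Aggregate in Run<T>.
            int total = new[] { batchResult, batchResult, batchResult }.Aggregate(combine);

            Console.WriteLine(total);  // 12
        }
    }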