Example #1
    public T Run<T>(BenchmarkOptions<T> options)
    {
        var batchAmounts = 32;
        var batchSize    = options.BatchSize;

        var batchRunResults = new List<T>();

        // Keep adding parallel batches until the results satisfy the configured criteria.
        while (true)
        {
            // TODO: maybe it's better to increase batchAmounts (e.g. x2 or x3) at each iteration - this can help parallelization (a doubling variant is sketched after this example)
            // TODO: maybe it's better to increase batchSize at each iteration + merge previous batches - this can reduce overall memory usage
            batchRunResults.AddRange(
                ParallelEnumerable.Range(0, batchAmounts)
                    .Select(_ => RunOnce(options, batchSize)));

            if (IsGoodEnough(options, batchRunResults))
            {
                break;
            }
        }

        return batchRunResults.Aggregate(options.Combine);
    }
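The first TODO hints at growing the batch count on each pass so that later iterations expose more parallel work. A minimal sketch of that idea (a doubling variant, reusing the same RunOnce/IsGoodEnough helpers from the examples; this is an illustration, not part of the original code):

    // Sketch only: grow batchAmounts geometrically instead of keeping it fixed at 32.
    var batchAmounts = 32;
    while (true)
    {
        batchRunResults.AddRange(
            ParallelEnumerable.Range(0, batchAmounts)
                .Select(_ => RunOnce(options, batchSize)));

        if (IsGoodEnough(options, batchRunResults))
        {
            break;
        }

        // Each new pass schedules as much work as all previous passes combined,
        // which keeps the ParallelEnumerable pipeline busier as the loop goes on.
        batchAmounts *= 2;
    }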
Example #2
    private static T RunOnce<T>(BenchmarkOptions<T> options, int runs)
    {
        return ParallelEnumerable.Range(0, runs)
            .Select(_ =>
            {
                // Retry until a single run yields a usable (non-null) result.
                while (true)
                {
                    var runResult = options.RunOnce();
                    if (runResult != null)
                    {
                        return runResult;
                    }
                }
            })
            .Aggregate(options.Seed, (a, b) => options.Combine(a, b));
    }
Example #3
    private static bool IsGoodEnough<T>(BenchmarkOptions<T> options, List<T> batchRunResults)
    {
        return options.QuantifiedValues.All(valueOptions => IsGoodEnough(valueOptions, batchRunResults));
    }
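None of the snippets show BenchmarkOptions<T> itself. Reconstructed purely from the members they touch (BatchSize, Seed, RunOnce, Combine, QuantifiedValues), it presumably looks roughly like the sketch below; the member types, the class constraint, and the QuantifiedValueOptions name are assumptions, not the actual definition.

    using System;
    using System.Collections.Generic;

    // Hypothetical per-metric settings consumed by the IsGoodEnough overload that is not shown here.
    public class QuantifiedValueOptions<T> { }

    // Assumed shape of the options type, inferred from how the examples use it.
    public class BenchmarkOptions<T> where T : class   // reference type assumed because RunOnce results are compared to null
    {
        public int BatchSize { get; set; }
        public T Seed { get; set; }
        public Func<T> RunOnce { get; set; }                // one measurement; null means "retry"
        public Func<T, T, T> Combine { get; set; }          // merges two partial results
        public IEnumerable<QuantifiedValueOptions<T>> QuantifiedValues { get; set; }
    }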