Executor class for running a single PerfBenchmarkAttribute. Exposes the BenchmarkContext, which allows developers to register custom metrics and counters for use in their own benchmarks.
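The snippets below are test-suite usage examples of this executor. As a primer, here is a minimal sketch (assuming the standard NBench attribute API; the class and counter names are illustrative) of how a benchmark registers and drives a custom counter through BenchmarkContext:

using NBench;

public class CustomCounterSpec
{
    private Counter _opCounter;

    [PerfSetup]
    public void Setup(BenchmarkContext context)
    {
        // BenchmarkContext hands out the counters declared via measurement attributes
        _opCounter = context.GetCounter("MyOpCounter");
    }

    [PerfBenchmark(RunMode = RunMode.Throughput, TestMode = TestMode.Measurement,
        NumberOfIterations = 3, RunTimeMilliseconds = 1000)]
    [CounterMeasurement("MyOpCounter")]
    public void RunCounterBenchmark()
    {
        _opCounter.Increment(); // the metric the Benchmark executor collects and reports
    }
}

GetCounter retrieves the counter declared by the [CounterMeasurement] attribute; incrementing it inside the benchmark body is what the executor measures on each run.
Example #1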
        [Theory]
        [InlineData(2, 300)] // keep the values small since there's a real delay involved
        public void ShouldComputeMetricsCorrectly(int iterationCount, int millisecondRuntime)
        {
            var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
            {
                if (warmup) return;
                var counterResults = report.Metrics[CounterName];
                var projectedThroughput = 1000/(double)IterationSpeedMs; // roughly the max value of this counter
                var observedDifference =
                    Math.Abs(projectedThroughput - counterResults.MetricValuePerSecond);
                Assert.True(observedDifference <= 1.5d, $"delta between expected value and actual measured value should be <= 1.5, was {observedDifference} [{counterResults.MetricValuePerSecond} op/s]. Expected [{projectedThroughput} op/s]");
            }, results =>
            {
                var counterResults = results.Data.StatsByMetric[CounterName].Stats.Max;
                Assert.Equal(iterationCount, counterResults);
            });

            var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
            var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total,
                Assertion.Empty);
            var memoryBenchmark = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);

            var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Throughput, iterationCount, millisecondRuntime,
               new[] { gcBenchmark }, new[] { memoryBenchmark }, new[] { counterBenchmark });

            var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);

            benchmark.Run();
        }
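
For the throughput assertion above to hold, the benchmark body behind _benchmarkMethods must pace itself at roughly IterationSpeedMs per operation, which makes 1000 / IterationSpeedMs the projected ceiling (e.g. a 100 ms delay caps the counter near 10 op/s). The invoker isn't shown in this snippet; a plausible sketch, assuming CounterName is the spec's static CounterMetricName field and picking an illustrative constant value:

        private const int IterationSpeedMs = 100; // assumed value; the real constant isn't shown in this snippet

        private readonly IBenchmarkInvoker _benchmarkMethods = new ActionBenchmarkInvoker(
            "ThroughputSpec", context =>
            {
                context.GetCounter(CounterName.CounterName).Increment();
                System.Threading.Thread.Sleep(IterationSpeedMs); // the "real delay" the [InlineData] comment refers to
            });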
Example #2
        public void Setup()
        {
            var benchmarkData = ReflectionDiscovery.CreateBenchmarksForClass(typeof(MemoryAllocationSpec)).First();
            var settings = ReflectionDiscovery.CreateSettingsForBenchmark(benchmarkData);
            var invoker = ReflectionDiscovery.CreateInvokerForBenchmark(benchmarkData);
            _testableBenchmark = new Benchmark(settings, invoker, BenchmarkOutput);
        }
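
ReflectionDiscovery derives the settings and invoker from the attributes on MemoryAllocationSpec, which isn't shown here. A hypothetical shape for that spec, using the standard measurement attributes:

    public class MemoryAllocationSpec
    {
        [PerfBenchmark(RunMode = RunMode.Iterations, TestMode = TestMode.Measurement, NumberOfIterations = 3)]
        [MemoryMeasurement(MemoryMetric.TotalBytesAllocated)]
        public void AllocateBytes()
        {
            var buffer = new byte[1 << 16]; // 64 KB allocated per iteration
            buffer[0] = 1; // touch the buffer so the allocation is observable
        }
    }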
Example #3
        public void ShouldComputeMetricsCorrectly(int iterationCount)
        {
            var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
            {
                if (!warmup)
                {
                    var counterResults = report.Metrics[CounterName];
                    Assert.Equal(1, counterResults.MetricValue);
                }
            }, results =>
            {
                var counterResults = results.Data.StatsByMetric[CounterName].Stats.Max;
                Assert.Equal(iterationCount, counterResults);
            });

            var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
            var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total,
                Assertion.Empty);
            var memoryBenchmark = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);

            var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
               new[] { gcBenchmark }, new MemoryBenchmarkSetting[0], new[] { counterBenchmark });

            var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);

            benchmark.Run();
        }
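
The per-report assertion above (MetricValue == 1) only holds if the benchmark body increments the counter exactly once per invocation, so the _benchmarkMethods invoker in this test class is presumably along these lines:

        private readonly IBenchmarkInvoker _benchmarkMethods = new ActionBenchmarkInvoker(
            "IterationSpec",
            context => context.GetCounter(CounterName.CounterName).Increment()); // exactly one increment per run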
Example #4
        [Fact]
        public void Should_exit_on_first_iteration_where_Benchmark_throws_Exception()
        {
            var iterationCount = 0;
            var expectedIterationCount = 0;
            bool finalOutputCalled = false;
            IBenchmarkInvoker faultyInvoker = new ActionBenchmarkInvoker("ExceptionThrower",
                context =>
                {
                    throw new Exception("poorly written spec");
                });

            IBenchmarkOutput faultyOutput = new ActionBenchmarkOutput(
                (report, isWarmup) =>
                {
                    iterationCount++; // should never run; the fault during pre-warmup should abort the benchmark
                    Assert.True(report.IsFaulted);
                }, results =>
                {
                    finalOutputCalled = true;
                    Assert.True(results.Data.IsFaulted);
                }, ActionBenchmarkOutput.DefaultWriteLineAction);

            var benchmark = new Benchmark(_faultySettings, faultyInvoker, faultyOutput);
            benchmark.Run();
            benchmark.Finish();
            Assert.Equal(expectedIterationCount, iterationCount);
            Assert.True(finalOutputCalled);
        }
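
The _faultySettings field isn't shown in the snippet. Presumably it is a plain measurement-mode configuration with no metric assertions, so the only failure comes from the thrown exception; a sketch using the same BenchmarkSettings overload as Example #5 below:

        private readonly BenchmarkSettings _faultySettings = new BenchmarkSettings(
            TestMode.Measurement, RunMode.Iterations, 3, 1000,
            new List<IBenchmarkSetting>(),
            new Dictionary<MetricName, MetricsCollectorSelector>());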
Example #5
        public void ShouldExecuteCorrectWarmupCount(int iterationCount)
        {
            var observedWarmupCount = -1; // we have a pre-warmup that always happens no matter what; need to account for it
            var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
            {
                if (warmup)
                {
                    observedWarmupCount++;
                }
            }, results =>
            {
                Assert.Equal(iterationCount, observedWarmupCount);
            });

            var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);

            var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
               new List<IBenchmarkSetting>() { counterBenchmark }, new Dictionary<MetricName, MetricsCollectorSelector>() {
                   { counterBenchmark.MetricName, new CounterSelector() } });

            var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);

            benchmark.Run();
        }
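
Putting the pieces together outside a test harness: an end-to-end sketch that wires a counter metric and its collector by hand, mirroring the settings overload used in the example above. ConsoleBenchmarkOutput and all names here are illustrative assumptions:

        var counterName = new CounterMetricName("TestCounter"); // hypothetical metric name
        var counterSetting = new CounterBenchmarkSetting(counterName.CounterName, AssertionType.Total, Assertion.Empty);

        var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, 10, 1000,
            new List<IBenchmarkSetting> { counterSetting },
            new Dictionary<MetricName, MetricsCollectorSelector>
            {
                { counterSetting.MetricName, new CounterSelector() }
            });

        var invoker = new ActionBenchmarkInvoker("ManualCounterSpec",
            context => context.GetCounter(counterName.CounterName).Increment());

        var benchmark = new Benchmark(settings, invoker, new ConsoleBenchmarkOutput());
        benchmark.Run();
        benchmark.Finish(); // writes the final report and evaluates any assertions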