An IBenchmarkOutput implementation designed to run BenchmarkAssertions against the data collected on each run and against the final benchmark results.
Inherits: IBenchmarkOutput
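Every example on this page follows the same pattern: construct an ActionBenchmarkOutput from a per-run callback (called with each run report plus a flag that is true during warmups) and a final-results callback, then pass it to a Benchmark. Below is a minimal, illustrative fragment of that shape, assembled only from the constructor overloads and members that appear in the examples that follow; the callback bodies are placeholders rather than real assertions.

        // per-run callback: fires after every iteration, including warmup runs
        // final callback: fires once with the aggregated results when the benchmark completes
        var output = new ActionBenchmarkOutput(
            (report, isWarmup) =>
            {
                if (isWarmup) return;   // most of the examples only assert against measured runs
                // inspect report.Metrics here
            },
            results =>
            {
                // inspect results.Data (e.g. results.Data.StatsByMetric) here
            });
        // The output is then handed to a benchmark, e.g. new Benchmark(settings, benchmarkMethods, output),
        // exactly as the examples below do.

Example #4 additionally shows the named-argument form new ActionBenchmarkOutput(benchmarkAction: ...), which hooks only the benchmark-level results.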
Example #1
        [Theory]
        [InlineData(2, 300)] // keep the values small since there's a real delay involved
        public void ShouldComputeMetricsCorrectly(int iterationCount, int millisecondRuntime)
        {
            var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
            {
                if (warmup) return;
                var counterResults = report.Metrics[CounterName];
                var projectedThroughput = 1000/(double)IterationSpeedMs; // roughly the max value of this counter
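                // e.g. an IterationSpeedMs of 100 ms per increment caps the counter at roughly 1000 / 100 = 10 increments per second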
                var observedDifference =
                    Math.Abs(projectedThroughput - counterResults.MetricValuePerSecond);
                Assert.True(observedDifference <= 1.5d,
                    $"delta between expected value and actual measured value should be <= 1.5, was {observedDifference} " +
                    $"[{counterResults.MetricValuePerSecond} op/s]. Expected [{projectedThroughput} op/s]");
            }, results =>
            {
                var counterResults = results.Data.StatsByMetric[CounterName].Stats.Max;
                Assert.Equal(iterationCount, counterResults);
            });

            var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
            var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total,
                Assertion.Empty);
            var memoryBenchmark = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);

            var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Throughput, iterationCount, millisecondRuntime,
               new[] { gcBenchmark }, new[] { memoryBenchmark }, new[] { counterBenchmark });

            var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);

            benchmark.Run();
        }
Example #2
        public void Should_exit_on_first_iteration_where_Benchmark_throws_Exception()
        {
            var iterationCount = 0;
            var expectedIterationCount = 0;
            bool finalOutputCalled = false;
            IBenchmarkInvoker faultyInvoker = new ActionBenchmarkInvoker("ExceptionThrower",
                context =>
                {
                    throw new Exception("poorly written spec");
                });

            IBenchmarkOutput faultyOutput = new ActionBenchmarkOutput(
                (report, isWarmup) =>
                {
                    iterationCount++; //should run exactly once during pre-warmup
                    Assert.True(report.IsFaulted);
                }, results =>
                {
                    finalOutputCalled = true;
                    Assert.True(results.Data.IsFaulted);
                }, ActionBenchmarkOutput.DefaultWriteLineAction);

            var benchmark = new Benchmark(_faultySettings, faultyInvoker, faultyOutput);
            benchmark.Run();
            benchmark.Finish();
            Assert.Equal(expectedIterationCount, iterationCount);
            Assert.True(finalOutputCalled);
        }
Example #3
        public void ShouldComputeMetricsCorrectly(int iterationCount)
        {
            var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
            {
                if (!warmup)
                {
                    var counterResults = report.Metrics[CounterName];
                    Assert.Equal(1, counterResults.MetricValue);
                }
            }, results =>
            {
                var counterResults = results.Data.StatsByMetric[CounterName].Stats.Max;
                Assert.Equal(iterationCount, counterResults);
            });

            var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
            var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.Gen2, AssertionType.Total,
                Assertion.Empty);
            var memoryBenchmark = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);

            var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
                new List<IBenchmarkSetting>() { gcBenchmark, counterBenchmark },
                new Dictionary<MetricName, MetricsCollectorSelector>()
                {
                    { gcBenchmark.MetricName, new GcCollectionsSelector() },
                    { counterBenchmark.MetricName, new CounterSelector() }
                });

            var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);

            benchmark.Run();
        }
Example #4
        public void BugFix153IsFixed()
        {
            // hook only the final, benchmark-level results and check the counter's measured throughput
            var o = new ActionBenchmarkOutput(benchmarkAction: r =>
            {
                var name = new CounterMetricName(CounterThroughputBenchmark.CounterName);
                var count = r.Data.StatsByMetric[name];
                Assert.True(count.PerSecondStats.Average > 0.0);
            });

            // discover every benchmark declared on CounterThroughputBenchmark and run each one,
            // verifying that all of its assertions pass
            var d = new ReflectionDiscovery(o, DefaultBenchmarkAssertionRunner.Instance, new RunnerSettings());
            var benchmarks = d.FindBenchmarks(typeof(CounterThroughputBenchmark));
            foreach (var b in benchmarks)
            {
                b.Run();
                b.Finish();
                Assert.True(b.AllAssertsPassed);
            }
        }
Example #5
        public void ShouldExecuteCorrectWarmupCount(int iterationCount)
        {
            var observedWarmupCount = -1; //we have a pre-warmup that always happens no matter what. Need to account for it.
            var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
            {
                if (warmup)
                {
                    observedWarmupCount++;
                }
            }, results =>
            {
                Assert.Equal(iterationCount, observedWarmupCount);
            });

            var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);

            var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
                new List<IBenchmarkSetting>() { counterBenchmark },
                new Dictionary<MetricName, MetricsCollectorSelector>()
                {
                    { counterBenchmark.MetricName, new CounterSelector() }
                });

            var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);

            benchmark.Run();
        }