/// <summary>
/// Verifies that in iteration mode each measured run increments the counter exactly once,
/// and that the aggregated max counter value equals the total iteration count.
/// </summary>
/// <param name="iterationCount">Number of measured (non-warmup) iterations to run.</param>
public void ShouldComputeMetricsCorrectly(int iterationCount)
{
    var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
    {
        if (!warmup)
        {
            // Each measured iteration should record exactly one counter increment.
            var counterResults = report.Metrics[CounterName];
            Assert.Equal(1, counterResults.MetricValue);
        }
    }, results =>
    {
        // The aggregate max across all runs should match the number of iterations.
        var counterResults = results.Data.StatsByMetric[CounterName].Stats.Max;
        Assert.Equal(iterationCount, counterResults);
    });

    var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
    var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total, Assertion.Empty);
    // NOTE(review): the original also constructed a MemoryBenchmarkSetting here but never
    // used it — BenchmarkSettings receives an empty MemoryBenchmarkSetting[]. The unused
    // local has been removed; settings below are unchanged.
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
        new[] { gcBenchmark }, new MemoryBenchmarkSetting[0], new[] { counterBenchmark });
    var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);
    benchmark.Run();
}
[InlineData(2, 300)] // keep the values small since there's a real delay involved
/// <summary>
/// Verifies that per-second counter metrics measured in throughput mode land within
/// tolerance of the projected throughput, and that the aggregated max counter value
/// matches the iteration count.
/// </summary>
public void ShouldComputeMetricsCorrectly(int iterationCount, int millisecondRuntime)
{
    var assertionOutput = new ActionBenchmarkOutput((report, isWarmup) =>
    {
        if (isWarmup)
            return;

        var counterMetric = report.Metrics[CounterName];
        // roughly the max value of this counter
        var expectedThroughput = 1000 / (double)IterationSpeedMs;
        var delta = Math.Abs(expectedThroughput - counterMetric.MetricValuePerSecond);
        Assert.True(delta <= 1.5d,
            $"delta between expected value and actual measured value should be <= 1.5, was {delta} [{counterMetric.MetricValuePerSecond} op /s]. Expected [{expectedThroughput} op /s]");
    }, results =>
    {
        var maxCounterValue = results.Data.StatsByMetric[CounterName].Stats.Max;
        Assert.Equal(iterationCount, maxCounterValue);
    });

    var counterSetting = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
    var gcSetting = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total, Assertion.Empty);
    var memorySetting = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Throughput, iterationCount,
        millisecondRuntime, new[] { gcSetting }, new[] { memorySetting }, new[] { counterSetting });

    var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);
    benchmark.Run();
}
/// <summary>
/// Evaluates every configured benchmark assertion against the collected results.
/// </summary>
/// <param name="settings">The benchmark settings; must not be null.</param>
/// <param name="results">Collected benchmark results, keyed by metric name.</param>
/// <returns>
/// One <see cref="AssertionResult"/> per setting carrying a non-empty assertion;
/// an empty list when running in measurement mode.
/// </returns>
public IReadOnlyList<AssertionResult> RunAssertions(BenchmarkSettings settings, BenchmarkResults results)
{
    Contract.Requires(settings != null);
    var assertionResults = new List<AssertionResult>();

    // Not in testing mode, therefore we don't need to apply these BenchmarkAssertions
    if (settings.TestMode == TestMode.Measurement)
    {
        return assertionResults;
    }

    // collect all benchmark settings with non-empty BenchmarkAssertions
    IReadOnlyList<IBenchmarkSetting> allSettings =
        settings.Measurements.Where(x => !x.Assertion.Equals(Assertion.Empty)).ToList();

    foreach (var setting in allSettings)
    {
        var stats = results.StatsByMetric[setting.MetricName];

        // Throughput assertions compare per-second averages; all others compare raw averages.
        var valueToBeTested = setting.AssertionType == AssertionType.Throughput
            ? stats.PerSecondStats.Average
            : stats.Stats.Average;

        assertionResults.Add(
            AssertionResult.CreateResult(setting.MetricName, stats.Unit, valueToBeTested, setting.Assertion));
    }
    return assertionResults;
}
/// <summary>
/// Runs all non-empty assertions defined in <paramref name="settings"/> against the
/// aggregated benchmark <paramref name="results"/>.
/// </summary>
/// <returns>The outcome of each assertion; empty when in measurement mode.</returns>
public IReadOnlyList<AssertionResult> RunAssertions(BenchmarkSettings settings, BenchmarkResults results)
{
    Contract.Requires(settings != null);

    // Not in testing mode, therefore we don't need to apply these BenchmarkAssertions
    if (settings.TestMode == TestMode.Measurement)
    {
        return new List<AssertionResult>();
    }

    // collect all benchmark settings with non-empty BenchmarkAssertions
    var settingsWithAssertions = settings.Measurements
        .Where(m => !m.Assertion.Equals(Assertion.Empty))
        .ToList();

    var outcomes = new List<AssertionResult>();
    foreach (var measurement in settingsWithAssertions)
    {
        var aggregate = results.StatsByMetric[measurement.MetricName];

        double observedValue;
        if (measurement.AssertionType == AssertionType.Throughput)
        {
            observedValue = aggregate.PerSecondStats.Average;
        }
        else
        {
            observedValue = aggregate.Stats.Average;
        }

        outcomes.Add(
            AssertionResult.CreateResult(measurement.MetricName, aggregate.Unit, observedValue, measurement.Assertion));
    }

    return outcomes;
}
/// <summary>
/// Initializes a new benchmark from compiled settings, the invoker that executes the
/// benchmark methods, and the output target used for reporting.
/// </summary>
public Benchmark(BenchmarkSettings settings, IBenchmarkInvoker invoker, IBenchmarkOutput writer)
{
    Settings = settings;
    Invoker = invoker;
    Output = writer;
    _pendingIterations = Settings.NumberOfIterations;
    // Pre-size the queue so enqueuing each run report never triggers a resize.
    CompletedRuns = new Queue<BenchmarkRunReport>(Settings.NumberOfIterations);
    Builder = new BenchmarkBuilder(Settings);
}
/// <summary>
/// Constructs a benchmark instance wired to the given settings, invoker, and output writer.
/// </summary>
public Benchmark(BenchmarkSettings settings, IBenchmarkInvoker invoker, IBenchmarkOutput writer)
{
    Settings = settings;
    Invoker = invoker;
    Output = writer;

    // Remaining iteration budget starts at the configured total.
    _pendingIterations = Settings.NumberOfIterations;

    // One report slot per planned iteration.
    CompletedRuns = new Queue<BenchmarkRunReport>(Settings.NumberOfIterations);
    Builder = new BenchmarkBuilder(Settings);
}
/// <summary>
/// Creates the final results for a completed benchmark: aggregates per-metric stats
/// across all runs and flattens any exceptions the runs captured.
/// </summary>
/// <param name="typeName">Name of the benchmark; must be non-empty.</param>
/// <param name="settings">The settings the benchmark ran with.</param>
/// <param name="runs">All completed run reports; must not be null.</param>
public BenchmarkResults(string typeName, BenchmarkSettings settings, IReadOnlyList<BenchmarkRunReport> runs)
{
    Contract.Requires(!string.IsNullOrEmpty(typeName));
    Contract.Requires(runs != null);
    BenchmarkName = typeName;
    Settings = settings;
    Runs = runs;
    // Fix: the original assigned an empty Dictionary<MetricName, AggregateMetrics> to
    // StatsByMetric and then immediately overwrote it with Aggregate(Runs); that first
    // assignment was a dead store and has been removed.
    StatsByMetric = Aggregate(Runs);
    Exceptions = Runs.SelectMany(r => r.Exceptions).ToList();
}
/// <summary>
/// A builder configured with a single counter metric (and no GC or memory metrics)
/// should produce a run tracking exactly that one counter.
/// </summary>
public void Should_build_when_exactly_one_metric_assigned()
{
    // Arrange: settings containing one counter metric and nothing else.
    var counterSetting = new CounterBenchmarkSetting("Test", AssertionType.Total, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, 10, 1000,
        new GcBenchmarkSetting[0], new MemoryBenchmarkSetting[0], new[] { counterSetting });

    // Act
    var builder = new BenchmarkBuilder(settings);
    var run = builder.NewRun(WarmupData.PreWarmup);

    // Assert: exactly one measurement, and it's our counter.
    Assert.Equal(1, run.MeasureCount);
    Assert.Equal(1, run.Counters.Count);
    Assert.True(run.Counters.ContainsKey(counterSetting.CounterName));
}
/// <summary>
/// A builder configured with counter, GC, and memory metrics should produce a run whose
/// measurement count covers all of them (GC expands to one measurement per generation).
/// </summary>
public void Should_build_when_at_least_one_metric_assigned()
{
    // Arrange: one of each metric kind.
    var counterSetting = new CounterBenchmarkSetting("Test", AssertionType.Total, Assertion.Empty);
    var gcSetting = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total, Assertion.Empty);
    var memorySetting = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, 10, 1000,
        new[] { gcSetting }, new[] { memorySetting }, new[] { counterSetting });

    // Act
    var builder = new BenchmarkBuilder(settings);
    var run = builder.NewRun(WarmupData.PreWarmup);

    // Assert: counter + memory + (one GC measurement per generation, 0..MaxGcGeneration).
    Assert.Equal(2 + (SysInfo.Instance.MaxGcGeneration + 1), run.MeasureCount);
    Assert.Equal(1, run.Counters.Count);
    Assert.True(run.Counters.ContainsKey(counterSetting.CounterName));
}
/// <summary>
/// A builder fed a flat list of counter, GC (Gen2), and memory settings — each paired with
/// its collector selector — should produce a run with exactly three measurements.
/// </summary>
public void Should_build_when_at_least_one_metric_assigned()
{
    // Arrange: one of each metric kind, plus the selector that collects each metric.
    var counterSetting = new CounterBenchmarkSetting("Test", AssertionType.Total, Assertion.Empty);
    var gcSetting = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.Gen2, AssertionType.Total, Assertion.Empty);
    var memorySetting = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);

    var allSettings = new List<IBenchmarkSetting>() { gcSetting, memorySetting, counterSetting };
    var selectors = new Dictionary<MetricName, MetricsCollectorSelector>()
    {
        { gcSetting.MetricName, new GcCollectionsSelector() },
        { counterSetting.MetricName, new CounterSelector() },
        { memorySetting.MetricName, new TotalMemorySelector() }
    };
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, 10, 1000, allSettings, selectors);

    // Act
    var builder = new BenchmarkBuilder(settings);
    var run = builder.NewRun(WarmupData.PreWarmup);

    // Assert: one measurement per setting, and the counter is registered by name.
    Assert.Equal(3, run.MeasureCount);
    Assert.Equal(1, run.Counters.Count);
    Assert.True(run.Counters.ContainsKey(counterSetting.CounterName));
}
/// <summary>
/// Verifies that the benchmark performs exactly one warmup per iteration, excluding the
/// mandatory pre-warmup pass.
/// </summary>
/// <param name="iterationCount">Number of iterations (and therefore expected warmups).</param>
public void ShouldExecuteCorrectWarmupCount(int iterationCount)
{
    //we have a pre-warmup that always happens no matter what. Need to account for it.
    var warmupsObserved = -1;
    var assertionOutput = new ActionBenchmarkOutput((report, isWarmup) =>
    {
        if (isWarmup)
        {
            warmupsObserved++;
        }
    }, results => Assert.Equal(iterationCount, warmupsObserved));

    var counterSetting = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
        new List<IBenchmarkSetting>() { counterSetting },
        new Dictionary<MetricName, MetricsCollectorSelector>()
        {
            { counterSetting.MetricName, new CounterSelector() }
        });

    var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);
    benchmark.Run();
}
/// <summary>
/// Backwards-compatible constructor for NBench 0.1.6 and earlier.
/// </summary>
/// <param name="settings">The settings for this benchmark.</param>
/// <param name="invoker">The invoker used to execute benchmark and setup / cleanup methods.</param>
/// <param name="writer">The output target this benchmark will write to.</param>
/// <remarks>Uses the <see cref="DefaultBenchmarkAssertionRunner"/> to assert benchmark data.</remarks>
public Benchmark(BenchmarkSettings settings, IBenchmarkInvoker invoker, IBenchmarkOutput writer)
    : this(settings, invoker, writer, DefaultBenchmarkAssertionRunner.Instance)
{
}
/// <summary>
/// Creates a new benchmark builder instance.
/// </summary>
/// <param name="settings">The settings compiled for this benchmark; must not be null
/// and must track at least one metric.</param>
public BenchmarkBuilder(BenchmarkSettings settings)
{
    // Guard against null before the next precondition dereferences settings.
    Contract.Requires(settings != null);
    Contract.Requires(settings.TotalTrackedMetrics > 0);
    Settings = settings;
}