// Verifies that a [Setup]-annotated method is executed before the benchmark runs:
// the setup method writes a marker line which must appear in the captured log.
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(logger).Build();
    // Run purely for its logging side effect; the returned reports were unused.
    new BenchmarkRunner(plugins).Run<SetupAttributeTest>();
    Assert.Contains("// ### Setup called ###" + Environment.NewLine, logger.GetLog());
}
// Verifies [Params] handling on a field: each declared parameter value (1,2,3,8,9,10)
// must produce a "New Parameter" marker in the log, and the field's default value (0)
// must NOT have been benchmarked.
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(logger).Build();
    // Run purely for its logging side effect; the returned reports were unused.
    new BenchmarkRunner(plugins).Run<ParamsTestField>();
    // Snapshot the log once instead of re-fetching it on every loop iteration.
    var testOutput = logger.GetLog();
    foreach (var param in new[] { 1, 2, 3, 8, 9, 10 })
    {
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testOutput);
    }
    // No interpolation holes here, so a plain literal suffices (was a redundant $-string).
    Assert.DoesNotContain("// ### New Parameter 0 ###" + Environment.NewLine, testOutput);
}
// Verifies that the source diagnoser prints the benchmarked method's code
// (and its assembly listing) into the log output.
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var sourceDiagnoser = new BenchmarkSourceDiagnoser();
    var plugins = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(logger)
        .AddDiagnoser(sourceDiagnoser)
        .Build();
    // Run purely for its logging side effect; the returned reports were unused.
    new BenchmarkRunner(plugins).Run<SourceDiagnoserTest>();
    var testOutput = logger.GetLog();
    Assert.Contains("Printing Code for Method: BenchmarkDotNet.IntegrationTests.SourceDiagnoserTest.DictionaryEnumeration()", testOutput);
    Assert.Contains("PrintAssembly=True", testOutput);
}
// Runs two serialization benchmarks back-to-back and checks that each one
// announces itself in the log with its library name and declaring type.
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(logger).Build();

    new BenchmarkRunner(plugins).Run<FlatClassBenchmark>();
    var flatClassMarker = $"// ### Benchmark: SerializationLibrary1, Type: {typeof(FlatClassBenchmark).Name} ###";
    Assert.Contains(flatClassMarker, logger.GetLog());

    // Clear between runs so the second assertion only sees the second run's output.
    logger.ClearLog();

    new BenchmarkRunner(plugins).Run<DoubleArrayBenchmark>();
    var doubleArrayMarker = $"// ### Benchmark: SerializationLibrary2, Type: {typeof(DoubleArrayBenchmark).Name} ###";
    Assert.Contains(doubleArrayMarker, logger.GetLog());
}
// End-to-end sanity test: runs a deliberately slow (~500 ms) and a fast (~15 ms)
// benchmark, confirms both actually executed, that their relative timings and
// throughput are ordered as expected, and that each run's average falls inside
// a tight absolute window.
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(logger).Build();
    var reports = new BenchmarkRunner(plugins).Run<PerformanceUnitTest>();

    // Sanity checks, to be sure that the different benchmarks actually run
    var testOutput = logger.GetLog();
    Assert.Contains("// ### Slow Benchmark called ###" + Environment.NewLine, testOutput);
    Assert.Contains("// ### Fast Benchmark called ###" + Environment.NewLine, testOutput);

    // Check that slow benchmark is actually slower than the fast benchmark!
    var slowBenchmarkRun = reports.GetRunsFor<PerformanceUnitTest>(r => r.SlowBenchmark()).First();
    var fastBenchmarkRun = reports.GetRunsFor<PerformanceUnitTest>(r => r.FastBenchmark()).First();
    Assert.True(slowBenchmarkRun.AverageNanoseconds > fastBenchmarkRun.AverageNanoseconds,
        $"Expected SlowBenchmark: {slowBenchmarkRun.AverageNanoseconds:N2} ns to be MORE than FastBenchmark: {fastBenchmarkRun.AverageNanoseconds:N2} ns");
    Assert.True(slowBenchmarkRun.OpsPerSecond < fastBenchmarkRun.OpsPerSecond,
        $"Expected SlowBenchmark: {slowBenchmarkRun.OpsPerSecond:N2} Ops to be LESS than FastBenchmark: {fastBenchmarkRun.OpsPerSecond:N2} Ops");

    // Whilst we're at it, let's do more specific Asserts as we know what the elapsed time should be
    var slowBenchmarkReport = reports.GetReportFor<PerformanceUnitTest>(r => r.SlowBenchmark());
    var fastBenchmarkReport = reports.GetReportFor<PerformanceUnitTest>(r => r.FastBenchmark());
    foreach (var slowRun in slowBenchmarkReport.Runs)
    {
        // Convert ns -> ms; SlowBenchmark is expected to take ~500 ms per run.
        Assert.InRange(slowRun.AverageNanoseconds / 1000.0 / 1000.0, low: 499, high: 502);
    }
    foreach (var fastRun in fastBenchmarkReport.Runs)
    {
        // Convert ns -> ms; FastBenchmark is expected to take ~15 ms per run.
        Assert.InRange(fastRun.AverageNanoseconds / 1000.0 / 1000.0, low: 14, high: 17);
    }
}
/// <summary>
/// Creates a runner that uses the supplied plugins, falling back to the
/// default plugin set when none are provided.
/// </summary>
/// <param name="plugins">Optional plugin set; may be null.</param>
public BenchmarkRunner(IBenchmarkPlugins plugins = null)
{
    if (plugins == null)
    {
        plugins = BenchmarkPluginBuilder.CreateDefault().Build();
    }
    Plugins = plugins;
}