public void Test()
{
    // Capture all runner output so the setup marker can be asserted on afterwards.
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var pluginSet = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(accumulationLogger)
        .Build();

    new BenchmarkRunner(pluginSet).Run<SetupAttributeTest>();

    // The [Setup] method must have written exactly this marker line to the log.
    Assert.Contains("// ### Setup called ###" + Environment.NewLine, accumulationLogger.GetLog());
}
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(logger).Build();

    new BenchmarkRunner(plugins).Run<ParamsTestField>();

    // Fetch the accumulated log once instead of re-reading it on every loop iteration
    // (nothing writes to the logger after the run, so a single snapshot is equivalent).
    var testOutput = logger.GetLog();

    // Every declared [Params] value must have been announced in the log.
    foreach (var param in new[] { 1, 2, 3, 8, 9, 10 })
    {
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testOutput);
    }

    // The field's default value (0) is not in the params list and must never have run.
    // (Dropped the unnecessary '$' prefix: the string has no interpolation holes.)
    Assert.DoesNotContain("// ### New Parameter 0 ###" + Environment.NewLine, testOutput);
}
public void Test()
{
    // Wire both an accumulating logger and the source diagnoser into the plugin set.
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var diagnoser = new BenchmarkSourceDiagnoser();
    var pluginSet = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(accumulationLogger)
        .AddDiagnoser(diagnoser)
        .Build();

    new BenchmarkRunner(pluginSet).Run<SourceDiagnoserTest>();

    // The diagnoser should have emitted both the per-method source dump header
    // and its assembly-printing flag into the captured output.
    var output = accumulationLogger.GetLog();
    Assert.Contains("Printing Code for Method: BenchmarkDotNet.IntegrationTests.SourceDiagnoserTest.DictionaryEnumeration()", output);
    Assert.Contains("PrintAssembly=True", output);
}
public void Test()
{
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var pluginSet = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(accumulationLogger)
        .Build();

    // First competition: the flat-class benchmark must announce itself with its library tag.
    new BenchmarkRunner(pluginSet).Run<FlatClassBenchmark>();
    Assert.Contains(
        $"// ### Benchmark: SerializationLibrary1, Type: {typeof(FlatClassBenchmark).Name} ###",
        accumulationLogger.GetLog());

    // Reset the captured log so the second run's assertion cannot match stale output.
    accumulationLogger.ClearLog();

    // Second competition: the double-array benchmark with its own library tag.
    new BenchmarkRunner(pluginSet).Run<DoubleArrayBenchmark>();
    Assert.Contains(
        $"// ### Benchmark: SerializationLibrary2, Type: {typeof(DoubleArrayBenchmark).Name} ###",
        accumulationLogger.GetLog());
}
public void CustomToolchain()
{
    // Stub toolchain stages; each records whether it was invoked via its Done flag.
    var generator = new MyGenerator();
    var builder = new MyBuilder();
    var executor = new MyExecutor();

    // Register the three stages as a custom toolchain on an otherwise empty plugin set.
    var pluginSet = BenchmarkPluginBuilder.CreateEmpty().AddToolchain(
        new BenchmarkToolchainBuilder(
            BenchmarkToolchain.Custom1,
            (benchmark, logger) => generator,
            (benchmark, logger) => builder,
            (benchmark, logger) => executor));

    new BenchmarkRunner(pluginSet).Run<ToolchainTest>();

    // All three stages must have been driven by the runner.
    Assert.True(generator.Done);
    Assert.True(builder.Done);
    Assert.True(executor.Done);

    // Without any registered toolchain the runner has no way to execute the benchmark.
    Assert.Throws<NotSupportedException>(
        () => new BenchmarkRunner(BenchmarkPluginBuilder.CreateEmpty()).Run<ToolchainTest>());
}
/// <summary>
/// Runs every competition type whose name or index matches one of the command-line
/// <paramref name="args"/>, then (if the first arg is a URL) runs benchmarks from that URL.
/// </summary>
/// <param name="args">Selectors: a case-insensitive type-name prefix, "#i" or "i" for an index, or "*" for all.</param>
private void RunBenchmarks(string[] args)
{
    for (int i = 0; i < Types.Length; i++)
    {
        var type = Types[i];
        // Match by case-insensitive name prefix, by index ("#3" or "3"), or run everything ("*").
        // StringComparison.OrdinalIgnoreCase replaces the original ToLower()/ToLower() comparison,
        // which was culture-sensitive (e.g. the Turkish 'I' problem) and allocated two strings per check.
        bool selected = args.Any(arg => type.Name.StartsWith(arg, StringComparison.OrdinalIgnoreCase))
                        || args.Contains("#" + i)
                        || args.Contains(i.ToString())
                        || args.Contains("*");
        if (selected)
        {
            logger.WriteLineHeader("Target competition: " + type.Name);
            new BenchmarkRunner(BenchmarkPluginBuilder.BuildFromArgs(args).Build()).Run(type);
            logger.NewLine();
        }
    }

    // TODO: move this logic to the RunUrl method
    if (args.Length > 0
        && (args[0].StartsWith("http://", StringComparison.Ordinal)
            || args[0].StartsWith("https://", StringComparison.Ordinal)))
    {
        var url = args[0];
        // Constructing the Uri validates the URL up front (throws UriFormatException on garbage),
        // preserving the original behavior; the unused 'name' local has been removed.
        var uri = new Uri(url);
        new BenchmarkRunner(BenchmarkPluginBuilder.BuildFromArgs(args).Build()).RunUrl(url);
    }
}
public void Test()
{
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var pluginSet = BenchmarkPluginBuilder.CreateDefault().AddLogger(accumulationLogger).Build();
    var reports = new BenchmarkRunner(pluginSet).Run<PerformanceUnitTest>();

    // Sanity checks, to be sure that the different benchmarks actually run.
    var testOutput = accumulationLogger.GetLog();
    Assert.Contains("// ### Slow Benchmark called ###" + Environment.NewLine, testOutput);
    Assert.Contains("// ### Fast Benchmark called ###" + Environment.NewLine, testOutput);

    // Check that the slow benchmark really is slower than the fast one.
    var slowRun = reports.GetRunsFor<PerformanceUnitTest>(r => r.SlowBenchmark()).First();
    var fastRun = reports.GetRunsFor<PerformanceUnitTest>(r => r.FastBenchmark()).First();
    Assert.True(
        slowRun.AverageNanoseconds > fastRun.AverageNanoseconds,
        string.Format("Expected SlowBenchmark: {0:N2} ns to be MORE than FastBenchmark: {1:N2} ns",
            slowRun.AverageNanoseconds, fastRun.AverageNanoseconds));
    Assert.True(
        slowRun.OpsPerSecond < fastRun.OpsPerSecond,
        string.Format("Expected SlowBenchmark: {0:N2} Ops to be LESS than FastBenchmark: {1:N2} Ops",
            slowRun.OpsPerSecond, fastRun.OpsPerSecond));

    // More specific asserts: we know what the elapsed time of each benchmark should be,
    // so every individual run's average (converted ns -> ms) must fall in its expected window.
    var slowReport = reports.GetReportFor<PerformanceUnitTest>(r => r.SlowBenchmark());
    var fastReport = reports.GetReportFor<PerformanceUnitTest>(r => r.FastBenchmark());
    foreach (var run in slowReport.Runs)
    {
        Assert.InRange(run.AverageNanoseconds / 1000.0 / 1000.0, low: 499, high: 502);
    }
    foreach (var run in fastReport.Runs)
    {
        Assert.InRange(run.AverageNanoseconds / 1000.0 / 1000.0, low: 14, high: 17);
    }
}
/// <summary>
/// Creates a runner using the supplied plugin set, or the default set when none is given.
/// </summary>
/// <param name="plugins">Optional plugin set; null selects <c>BenchmarkPluginBuilder.CreateDefault().Build()</c>.</param>
public BenchmarkRunner(IBenchmarkPlugins plugins = null)
{
    if (plugins == null)
    {
        plugins = BenchmarkPluginBuilder.CreateDefault().Build();
    }
    Plugins = plugins;
}