// Verifies that each declared parameter value (1,2,3,8,9,10) produced a run,
// and that the field's default value (0) was never used as a parameter.
public static void Test()
{
    var reports = new BenchmarkRunner().RunCompetition(new ParamsTestStaticField());
    // GetTestOutput() is loop-invariant once the run has finished; fetch it once
    // instead of once per assertion.
    var testOutput = GetTestOutput();
    foreach (var param in new[] { 1, 2, 3, 8, 9, 10 })
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testOutput);
    Assert.DoesNotContain($"// ### New Parameter 0 ###" + Environment.NewLine, testOutput);
}
// Verifies that each declared parameter value (1,2,3,8,9,10) produced a run,
// and that the property's default value (default(int) == 0) was never used.
public void Test()
{
    var reports = new BenchmarkRunner().Run<ParamsTestStaticProperty>();
    // GetTestOutput() is loop-invariant once the run has finished; fetch it once
    // instead of once per assertion.
    var testOutput = GetTestOutput();
    foreach (var param in new[] { 1, 2, 3, 8, 9, 10 })
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testOutput);
    Assert.DoesNotContain($"// ### New Parameter {default(int)} ###" + Environment.NewLine, testOutput);
}
// Ensures the [Setup] method runs before the benchmark: its marker line
// must appear in the accumulated log.
public void Test()
{
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(accumulationLogger)
        .Build();
    var reports = new BenchmarkRunner(plugins).Run<SetupAttributeTest>();
    Assert.Contains("// ### Setup called ###" + Environment.NewLine, accumulationLogger.GetLog());
}
// Runs a benchmark with every stat result-extender attached and checks that
// each extender's column name shows up in the exported table's header row.
public void Test()
{
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var statExtenders = new[]
    {
        BenchmarkStatResultExtender.StdDev,
        BenchmarkStatResultExtender.Min,
        BenchmarkStatResultExtender.Q1,
        BenchmarkStatResultExtender.Median,
        BenchmarkStatResultExtender.Q3,
        BenchmarkStatResultExtender.Max,
        BenchmarkStatResultExtender.OperationPerSecond
    };
    var plugins = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(accumulationLogger)
        .AddResultExtenders(statExtenders)
        .Build();
    var reports = new BenchmarkRunner(plugins).Run<Target>().ToList();
    output.WriteLine(accumulationLogger.GetLog());

    var table = BenchmarkExporterHelper.BuildTable(reports, plugins.ResultExtenders);
    var headerRow = table.First();
    foreach (var statExtender in statExtenders)
        Assert.True(headerRow.Contains(statExtender.ColumnName));
}
// End-to-end sanity test: both benchmarks must run, the slow one must measure
// slower (and fewer ops/sec) than the fast one, and each run must land in its
// known timing window (~500ms slow, ~15ms fast).
public void Test()
{
    var log = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(log).Build();
    var reports = new BenchmarkRunner(plugins).Run<PerformanceUnitTest>();

    // Sanity checks, to be sure that the different benchmarks actually run
    var testOutput = log.GetLog();
    Assert.Contains("// ### Slow Benchmark called ###" + Environment.NewLine, testOutput);
    Assert.Contains("// ### Fast Benchmark called ###" + Environment.NewLine, testOutput);

    // Check that slow benchmark is actually slower than the fast benchmark!
    var slowRun = reports.GetRunsFor<PerformanceUnitTest>(r => r.SlowBenchmark()).First();
    var fastRun = reports.GetRunsFor<PerformanceUnitTest>(r => r.FastBenchmark()).First();
    Assert.True(slowRun.AverageNanoseconds > fastRun.AverageNanoseconds,
        string.Format("Expected SlowBenchmark: {0:N2} ns to be MORE than FastBenchmark: {1:N2} ns",
            slowRun.AverageNanoseconds, fastRun.AverageNanoseconds));
    Assert.True(slowRun.OpsPerSecond < fastRun.OpsPerSecond,
        string.Format("Expected SlowBenchmark: {0:N2} Ops to be LESS than FastBenchmark: {1:N2} Ops",
            slowRun.OpsPerSecond, fastRun.OpsPerSecond));

    // More specific asserts, since we know what the elapsed times should be.
    var slowReport = reports.GetReportFor<PerformanceUnitTest>(r => r.SlowBenchmark());
    var fastReport = reports.GetReportFor<PerformanceUnitTest>(r => r.FastBenchmark());
    foreach (var run in slowReport.Runs)
        Assert.InRange(run.AverageNanoseconds / 1000.0 / 1000.0, low: 499, high: 502);
    foreach (var run in fastReport.Runs)
        Assert.InRange(run.AverageNanoseconds / 1000.0 / 1000.0, low: 14, high: 17);
}
// Verifies that each declared parameter value (1,2,3,8,9,10) produced a run,
// and that the field's default value (0) was never used as a parameter.
public static void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(logger).Build();
    var reports = new BenchmarkRunner(plugins).Run<ParamsTestStaticField>();
    // The accumulated log is complete once the run has finished; fetch it once
    // instead of once per assertion.
    var log = logger.GetLog();
    foreach (var param in new[] { 1, 2, 3, 8, 9, 10 })
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, log);
    Assert.DoesNotContain($"// ### New Parameter 0 ###" + Environment.NewLine, log);
}
// Ensures the F#-style benchmark class is discovered and its method executes.
public void Test()
{
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(accumulationLogger)
        .Build();

    // Run our "Pretend F# test" (see above for more info)
    var reports = new BenchmarkRunner(plugins).Run<BenchmarkSpec.Db>();

    var testLog = accumulationLogger.GetLog();
    Assert.Contains("// ### F# Benchmark method called ###", testLog);
    Assert.DoesNotContain("No benchmarks found", testLog);
}
// Verifies the source diagnoser prints the benchmarked method's code and the
// assembly-printing flag.
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var sourceDiagnoser = new BenchmarkSourceDiagnoser();
    var plugins = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(logger)
        .AddDiagnoser(sourceDiagnoser)
        .Build();
    var reports = new BenchmarkRunner(plugins).Run<SourceDiagnoserTest>();
    var testOutput = logger.GetLog();
    // The benchmark that ran is SourceDiagnoserTest, so assert against that type
    // explicitly; this.GetType() would return a derived runtime type if this test
    // class were ever subclassed, breaking the expected string.
    Assert.Contains($"Printing Code for Method: {typeof(SourceDiagnoserTest).FullName}.DictionaryEnumeration()", testOutput);
    Assert.Contains("PrintAssembly=True", testOutput);
}
// Verifies both SingleRun benchmark modes execute and that benchmarks were found.
public void Test()
{
    var logger = new BenchmarkAccumulationLogger();
    var plugins = BenchmarkPluginBuilder.CreateDefault().AddLogger(logger).Build();
    var reports = new BenchmarkRunner(plugins).Run<BenchmarkModeTests>();
    var testLog = logger.GetLog();
    // NOTE(review): the original asserted each of these messages twice verbatim —
    // a copy-paste artifact with no extra coverage (possibly meant to check other
    // mode variants; confirm against BenchmarkModeTests). Asserting once each is
    // equivalent.
    Assert.Contains("// ### BenchmarkSingleRunVoid method called ###", testLog);
    Assert.Contains("// ### BenchmarkSingleRunWithReturnValue method called ###", testLog);
    Assert.DoesNotContain("No benchmarks found", testLog);
}
// Ensures the baseline-delta extender is NOT handed to exporters for console
// export, but file export still runs (without that extender).
public void Test()
{
    var accumulationLogger = new BenchmarkAccumulationLogger();
    var exporterUnderTest = new TestBenchmarkExporter();
    var plugins = BenchmarkPluginBuilder.CreateDefault()
        .AddLogger(accumulationLogger)
        .AddExporters(exporterUnderTest)
        .Build();
    var reports = new BenchmarkRunner(plugins).Run(this.GetType()).ToList();

    // Ensure that when the TestBenchmarkExporter() was run, it wasn't passed an instance of "BenchmarkBaselineDeltaResultExtender"
    Assert.False(exporterUnderTest.ExportCalled);
    Assert.Null(exporterUnderTest.ExportResultExtenders);
    Assert.NotNull(exporterUnderTest.ExportToFileResultExtenders);
    Assert.Equal(0, exporterUnderTest.ExportToFileResultExtenders.OfType<BenchmarkBaselineDeltaResultExtender>().Count());
    Assert.True(exporterUnderTest.ExportToFileCalled);
}
// Verifies that the default runner wires up BenchmarkBaselineDeltaResultExtender
// automatically and that the exported table gains its column as the last column,
// with "-" for the baseline row and a percentage for compared rows.
public void Test()
{
    // This is the common way to run benchmarks, it should wire up the BenchmarkBaselineDeltaResultExtender for us
    var runner = new BenchmarkRunner();
    var reports = runner.Run<BaselineDeltaResultExtenderTest>().ToList();
    var table = BenchmarkExporterHelper.BuildTable(reports, runner.Plugins.ResultExtenders);
    var headerRow = table.First();
    var extender = runner.Plugins.ResultExtenders.OfType<BenchmarkBaselineDeltaResultExtender>().FirstOrDefault();
    Assert.NotNull(extender);
    Assert.Equal(extender.ColumnName, headerRow.Last());
    var testNameColumn = Array.FindIndex(headerRow, c => c == "Method");
    var extraColumn = Array.FindIndex(headerRow, c => c == extender.ColumnName);
    foreach (var row in table)
    {
        // xUnit's Assert.Equal signature is (expected, actual); the original had
        // the arguments reversed, which yields misleading failure messages.
        Assert.Equal(extraColumn + 1, row.Length);
        if (row[testNameColumn] == "BenchmarkSlow") // This is our baseline
            Assert.Equal("-", row[extraColumn]);
        else if (row[testNameColumn] == "BenchmarkFast") // This should have been compared to the baseline
            Assert.Contains("%", row[extraColumn]);
    }
}
// Runs every mapping benchmark suite, archives today's BenchmarkDotNet results,
// then writes correctness results for each mapping into the archive folder.
private static void Main(string[] args)
{
    //ManualConfig config = ManualConfig
    //.Create(DefaultConfig.Instance)
    //.WithOptions(ConfigOptions.DisableOptimizationsValidator);

    // Benchmark every mapping direction in turn.
    BenchmarkRunner.Run<From_Account_To_AccountDto>();
    BenchmarkRunner.Run<From_TC0_Members_To_TC0_I0_Members>();
    BenchmarkRunner.Run<From_TC0_Members_To_TC0_I1_Members>();
    BenchmarkRunner.Run<From_TC0_Members_To_TC0_I2_Nullable_Members>();
    BenchmarkRunner.Run<From_TC0_Members_To_TS0_I0_Members>();
    BenchmarkRunner.Run<From_TC0_Members_To_TS0_I1_Members>();
    BenchmarkRunner.Run<From_TC0_Members_To_TS0_I2_Nullable_Members>();
    BenchmarkRunner.Run<From_TS0_Members_To_TC0_I0_Members>();
    BenchmarkRunner.Run<From_TS0_Members_To_TC0_I1_Members>();
    BenchmarkRunner.Run<From_TS0_Members_To_TC0_I2_Nullable_Members>();
    BenchmarkRunner.Run<From_TS0_Members_To_TS0_I0_Members>();
    BenchmarkRunner.Run<From_TS0_Members_To_TS0_I1_Members>();
    BenchmarkRunner.Run<From_TS0_Members_To_TS0_I2_Nullable_Members>();
    BenchmarkRunner.Run<From_TC1_To_TC1_0>();
    BenchmarkRunner.Run<From_TC1_To_TS1_0>();
    BenchmarkRunner.Run<From_TS1_To_TC1_0>();
    BenchmarkRunner.Run<From_TS1_To_TS1_0>();

    // Snapshot today's results folder, replacing any earlier snapshot from the same day.
    var currentResultsPath = Path.Combine(GetProjectPath(), "BenchmarkDotNet.Artifacts", "results");
    var benchmarksResultsPath = Path.Combine(GetBenchmarksResultsPath(), DateTime.Now.ToString("yyyy.MM.dd"));
    if (Directory.Exists(benchmarksResultsPath))
    {
        Directory.Delete(benchmarksResultsPath, true);
    }
    CopyDirectory(currentResultsPath, benchmarksResultsPath, true);

    ConfigureMappers();
    WriteTestResults(
        benchmarksResultsPath,
        GetTestResults<Account, AccountDto>(_fixture.Create<Account>())
            .Concat(GetTestResults<TC0_Members, TC0_I0_Members>(_fixture.Create<TC0_Members>()))
            .Concat(GetTestResults<TC0_Members, TC0_I1_Members>(_fixture.Create<TC0_Members>()))
            .Concat(GetTestResults<TC0_Members, TC0_I2_Nullable_Members>(_fixture.Create<TC0_Members>()))
            .Concat(GetTestResults<TC0_Members, TS0_I0_Members>(_fixture.Create<TC0_Members>()))
            .Concat(GetTestResults<TC0_Members, TS0_I1_Members>(_fixture.Create<TC0_Members>()))
            .Concat(GetTestResults<TC0_Members, TS0_I2_Nullable_Members>(_fixture.Create<TC0_Members>()))
            .Concat(GetTestResults<TS0_Members, TC0_I0_Members>(Mapper<TC0_Members, TS0_Members>.Map(_fixture.Create<TC0_Members>())))
            .Concat(GetTestResults<TS0_Members, TC0_I1_Members>(Mapper<TC0_Members, TS0_Members>.Map(_fixture.Create<TC0_Members>())))
            .Concat(GetTestResults<TS0_Members, TC0_I2_Nullable_Members>(Mapper<TC0_Members, TS0_Members>.Map(_fixture.Create<TC0_Members>())))
            .Concat(GetTestResults<TS0_Members, TS0_I0_Members>(Mapper<TC0_Members, TS0_Members>.Map(_fixture.Create<TC0_Members>())))
            .Concat(GetTestResults<TS0_Members, TS0_I1_Members>(Mapper<TC0_Members, TS0_Members>.Map(_fixture.Create<TC0_Members>())))
            .Concat(GetTestResults<TS0_Members, TS0_I2_Nullable_Members>(Mapper<TC0_Members, TS0_Members>.Map(_fixture.Create<TC0_Members>())))
            .Concat(GetTestResults<TC1, TC1_0>(_fixture.Create<TC1>()))
            .Concat(GetTestResults<TC1, TS1_0>(_fixture.Create<TC1>()))
            .Concat(GetTestResults<TS1, TC1_0>(Mapper<TC1, TS1>.Map(_fixture.Create<TC1>())))
            .Concat(GetTestResults<TS1, TS1_0>(Mapper<TC1, TS1>.Map(_fixture.Create<TC1>()))));
    Console.ReadLine();
}
// Executes the SM4 benchmark suite; the summary is intentionally discarded.
public void SM4Benchmark()
{
    _ = BenchmarkRunner.Run<SM4Benchmark>();
}
// Entry point: benchmarks inserts performed without transactions.
public static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run<InsertTestsWithoutTransactions>();
}
// The [Setup] method's marker line must appear in the captured test output.
public void Test()
{
    var reports = new BenchmarkRunner().Run<SetupAttributeTest>();
    var setupMarker = "// ### Setup called ###" + Environment.NewLine;
    Assert.Contains(setupMarker, GetTestOutput());
}
// Executes the CFB-mode benchmark suite; the summary is intentionally discarded.
public void CFBBenchmark()
{
    _ = BenchmarkRunner.Run<CFBBenchmark>();
}
// Entry point: runs the Benchmark class under BenchmarkDotNet.
private static void Main() => BenchmarkRunner.Run<Benchmark>();
// NOTE(review): the method name says "AtomicLongCompare" but it runs the
// ReservoirSampling benchmarks — confirm the intended benchmark target.
public void AtomicLongCompareBenchmark()
{
    BenchmarkRunner.Run<ReservoirSamplingBenchmarks>();
}
// Entry point: runs the memory benchmarks.
static void Main() => BenchmarkRunner.Run<MemoryBenchmarks>();
// Entry point: benchmarks the ReadProcessMemory scenarios.
static void Main(string[] args) => BenchmarkRunner.Run<ReadProcessMemory>();
// Entry point: runs the Benchmarks suite; summary is unused.
public static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run<Benchmarks>();
}
// Entry point: benchmarks the file-reader implementations.
static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run<FileReaderTester>();
}
// Entry point: runs the Test benchmark class.
// ReSharper disable once UnusedParameter.Local
static void Main(string[] args) => BenchmarkRunner.Run<Test>();
// Entry point: compares List-backed vs LinkedList-backed priority queues.
static void Main(string[] args) => BenchmarkRunner.Run<PriorityQueueListVsLinkedList>();
// Runs every benchmark against every container adapter, printing an aligned
// console table as it goes, then emits the collected results through all
// configured output writers. Pass "-update" to reuse previously recorded
// results instead of re-running those container/benchmark pairs.
public static void Main(string[] args)
{
    var containers = ContainerAdapterFactory.CreateAdapters().ToArray();
    var benchmarks = BenchmarkFactory.CreateBenchmarks().ToArray();
    var benchmarkResults = new List<BenchmarkResult>();
    var existingBenchmarkResults = new List<BenchmarkResult>();
    if (args != null && args.Any(a => a.Equals("-update", StringComparison.OrdinalIgnoreCase)))
    {
        existingBenchmarkResults.AddRange(XmlOutputReader.GetExistingBenchmarkResults(benchmarks, containers));
    }

    // The widest benchmark name drives all column padding and never changes,
    // so compute it once up front. (Max is also O(n) versus the original
    // sort-then-First, which re-sorted on every loop iteration.)
    var maxNameLength = benchmarks.Max(b => b.Name.Length);

    foreach (var container in containers)
    {
        var containerBenchmarkResults = new List<BenchmarkResult>();
        Console.WriteLine(
            "{0} {1}{2} {3,10} {4,10}",
            container.Name,
            container.Version,
            new string(' ', maxNameLength - container.Name.Length - container.Version.Length),
            "Single",
            "Multi");
        container.Prepare();
        foreach (var benchmark in benchmarks)
        {
            // Reuse a recorded result when "-update" supplied one; otherwise run fresh.
            var benchmarkResult = existingBenchmarkResults.SingleOrDefault(b => b.Container == container && b.Benchmark == benchmark);
            if (benchmarkResult == null)
            {
                benchmarkResult = new BenchmarkRunner(container, benchmark).Run();
            }

            containerBenchmarkResults.Add(benchmarkResult);
            Console.WriteLine(
                " {0}{1} {2,10} {3,10}",
                benchmarkResult.Benchmark.Name,
                new string(' ', maxNameLength - benchmarkResult.Benchmark.Name.Length),
                benchmarkResult.SingleThreadedResult,
                benchmarkResult.MultiThreadedResult);
        }

        container.Dispose();

        // All benchmarks of container have completed, now 'commit' results
        benchmarkResults.AddRange(containerBenchmarkResults);
        Console.WriteLine();
    }

    IOutput output = new MultiOutput(
        new XmlOutput(),
        new HtmlOutput(),
        new MarkdownOutput(),
        new CsvOutput(),
        new CsvRateOutput(),
        new ChartOutput(),
        new ZipOutput());
    output.Create(benchmarks, benchmarkResults);
    Console.WriteLine("Done");
}
// Entry point: runs the resource benchmark; summary is unused.
private static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run<ResourceBenchmark>();
}
// Entry point: benchmarks const fields against embedded values.
public static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run<ConstVSEmbedded>();
}
// Entry point: the Program class itself hosts the benchmark methods.
public static void Main()
{
    BenchmarkRunner.Run<Program>();
}
// Executes the AES benchmark suite; the summary is intentionally discarded.
public void AESBenchmark()
{
    _ = BenchmarkRunner.Run<AESBenchmark>();
}
// Entry point: benchmarks destructuring scenarios.
public static void Main()
{
    BenchmarkRunner.Run<DestructuringBenchmark>();
}
// Entry point: runs the Demo benchmarks; the summary is intentionally discarded.
static void Main(string[] args)
{
    _ = BenchmarkRunner.Run<Demo>();
}
// Entry point: benchmarks the SkImage resizer.
static void Main(string[] args) => BenchmarkRunner.Run<SkImageResizerBenchmark>();
// Entry point: compares the original implementation against the adjusted one.
static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run<OriginalVsAdjusted>();
}
// Entry point: runs the generics benchmark, then waits so the console stays open.
private static void Main(string[] args)
{
    BenchmarkRunner.Run<GenericsBenchmark>();
    Console.ReadLine();
}
// Entry point: benchmarks the serializers, then waits for a key press.
public static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run<Serializers>();
    Console.ReadKey();
}
// Initializes the window and wires the benchmark runner to a logger that
// writes into the output textbox.
public MainWindow()
{
    InitializeComponent();
    var outputLogger = new FctbLogger(OutputTextBox);
    runner = new BenchmarkRunner(new[] { outputLogger });
}
// Entry point: runs the Benchmarks suite.
public static void Main()
{
    BenchmarkRunner.Run<Benchmarks>();
}
// Executes the original-ChaCha20 benchmark suite; summary intentionally discarded.
public void ChaCha20OriginalBenchmark()
{
    _ = BenchmarkRunner.Run<ChaCha20OriginalBenchmark>();
}
// Entry point: benchmarks int Contains-comparison strategies.
static void Main(string[] args) => BenchmarkRunner.Run<IntsContainsCompareBenchmark>();
// Entry point: runs every benchmark class found in this assembly.
private static void Main(string[] args)
{
    var summary = BenchmarkRunner.Run(typeof(Program).Assembly);
}
// Executes the XChaCha20 benchmark suite; summary intentionally discarded.
public void XChaCha20Benchmark()
{
    _ = BenchmarkRunner.Run<XChaCha20Benchmark>();
}
// Entry point: compares heap-allocating vs allocation-free lexer string readers.
static void Main(string[] args) => BenchmarkRunner.Run<LexerStringReaderBenchmarks.HeapAllocVsNoHeapAlloc>();
// Executes the XSalsa20 benchmark suite; summary intentionally discarded.
public void XSalsa20Benchmark()
{
    _ = BenchmarkRunner.Run<XSalsa20Benchmark>();
}