/// <summary>
/// Runs all benchmarks declared on <paramref name="type"/> and reports any
/// validation, build, generate or execute failure as an inconclusive test.
/// </summary>
/// <param name="type">The benchmark class to run.</param>
/// <param name="columns">Extra summary columns to include in the report.</param>
protected void RunBenchmarks(Type type, params IColumn[] columns)
{
    var summary = BenchmarkRunner.Run(
        type,
        ManualConfig.Create(DefaultConfig.Instance)
            .With(AsciiDocExporter.Default, JsonExporter.Brief)
            .With(columns));

    // Assert validation errors
    var validationErrorsStringBuilder = new StringBuilder();
    foreach (var error in summary.ValidationErrors)
    {
        validationErrorsStringBuilder.AppendLine($"Validation error (critical={error.IsCritical}): {error.Message}");
    }

    var validationError = validationErrorsStringBuilder.ToString().Trim();
    if (!string.IsNullOrEmpty(validationError))
    {
        Assert.Inconclusive(validationError);
    }

    // Assert build/generate/execute errors
    var buildErrorsStringBuilder = new StringBuilder();
    foreach (var report in summary.Reports)
    {
        var buildResult = report.BuildResult;
        if (!buildResult.IsBuildSuccess)
        {
            // BUG FIX: BuildException can be null when the toolchain reports a
            // failure without an exception; dereferencing .Message unguarded
            // would throw a NullReferenceException and hide the real failure.
            buildErrorsStringBuilder.AppendLine($"Build exception={buildResult.BuildException?.Message ?? "unknown"}");
        }

        if (!buildResult.IsGenerateSuccess)
        {
            // Same null-safety guard as for the build exception above.
            buildErrorsStringBuilder.AppendLine($"Generate exception={buildResult.GenerateException?.Message ?? "unknown"}");
        }

        foreach (var executeResult in report.ExecuteResults)
        {
            if (executeResult.ExitCode == 0)
            {
                continue;
            }

            // Include the actual exit code to make the failure diagnosable.
            buildErrorsStringBuilder.AppendLine($"Execute result: exit code is not 0 (was {executeResult.ExitCode})");
        }
    }

    var buildError = buildErrorsStringBuilder.ToString().Trim();
    if (!string.IsNullOrEmpty(buildError))
    {
        Assert.Inconclusive(buildError);
    }
}
public static void Main(string[] args)
{
    // One shared configuration: custom markdown export, full statistics,
    // fail-fast execution validation, and jobs for .NET Core plus full CLR.
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(new CustomMarkdownExporter())
        .With(StatisticColumn.AllStatistics)
        .With(ExecutionValidator.FailOnError)
        .With(Job.Default.With(Runtime.Core))
        .With(Job.Default.With(Runtime.Clr));

    // Every suite runs against the same configuration, so drive them from a list.
    var benchmarkTypes = new[]
    {
        typeof(SolrSearchBenchmarks),
        typeof(Solr4.Search.ParameterContainerBenchmarks),
        typeof(Solr4.Search.Result.DocumentResultBenchmarks),
        typeof(Solr4.Search.Result.FacetFieldResultBenchmarks),
        typeof(Solr4.Search.Result.FacetQueryResultBenchmarks),
        typeof(Solr4.Search.Result.FacetRangeResultBenchmarks),
        typeof(Solr5.Search.ParameterContainerBenchmarks),
        typeof(Solr5.Search.Result.DocumentResultBenchmarks),
        typeof(Solr5.Search.Result.FacetFieldResultBenchmarks),
        typeof(Solr5.Search.Result.FacetQueryResultBenchmarks),
        typeof(Solr5.Search.Result.FacetRangeResultBenchmarks),
    };

    foreach (var benchmarkType in benchmarkTypes)
    {
        BenchmarkRunner.Run(benchmarkType, config);
    }

    // Keep the console window open until a key is pressed.
    Console.Read();
}
public void Benchmark_Tests20180119Dataset01()
{
    // Artifacts directory depends on which test framework compiled this file.
#if NUNIT
    string artifactsPath = TestContext.CurrentContext.TestDirectory;
#elif XUNIT
    string artifactsPath = Environment.CurrentDirectory;
#elif MSTEST
    string artifactsPath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
#endif

    // Further tuning (launch/warmup/target counts, RyuJIT/Core jobs,
    // ExecutionValidator.FailOnError) can be chained onto the config here.
    var config = ManualConfig
        .Create(new Config())
        .WithArtifactsPath(artifactsPath);

    BenchmarkRunner.Run<Tests.CommonShared.Core.Linq.UnitTestsMemory01>(config);
}
/// <summary>
/// The main entry point.
/// </summary>
internal static void Main()
{
    // Filter out Clr jobs when this process is hosted by the Mono runtime,
    // since the classic .NET Framework toolchain is not usable there.
    var config = ManualConfig.Create(DefaultConfig.Instance)
        .With
        (
            new SimpleFilter
            (
                b =>
                {
                    var isClrJob = b.Job.Env.Runtime.Name == "Clr";

                    // BUG FIX: the original used "is null" here, which inverted
                    // the flag (true when NOT on Mono) and therefore kept Clr
                    // jobs only when running on Mono — the opposite of intent.
                    var isRunningOnMono = Type.GetType("Mono.Runtime") != null;

                    if (!isClrJob)
                    {
                        return(true); // non-Clr jobs always run
                    }

                    return(!isRunningOnMono); // Clr jobs run only outside Mono
                }
            )
        );

    var refSummary = BenchmarkRunner.Run<InteropMethodsByRef>(config);
    var valueSummary = BenchmarkRunner.Run<InteropMethodsByValue>(config);

    /*var logger = ConsoleLogger.Default;
     * MarkdownExporter.Console.ExportToLog(refSummary, logger);
     * MarkdownExporter.Console.ExportToLog(valueSummary, logger);
     *
     * ConclusionHelper.Print(logger, config.GetCompositeAnalyser().Analyse(refSummary).ToList());
     * ConclusionHelper.Print(logger, config.GetCompositeAnalyser().Analyse(valueSummary).ToList());*/
}
public static void Main(string[] args)
{
    // Compare ProtoBuf vs GroBuf serialization across all four
    // JIT/runtime combinations the project targets.
    var config = ManualConfig.Create(DefaultConfig.Instance)
        .With(Job.LegacyJitX86)
        .With(Job.LegacyJitX64)
        .With(Job.RyuJitX64)
        .With(Job.Mono);

    BenchmarkRunner.Run<ProtoBufvsGroBufRunner>(config);
}
public static void Run()
{
    // Attach the Unicode console logger through the fluent configuration API.
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(ConsoleLogger.Unicode);

    BenchmarkRunner.Run<IntroUnicodeFluentConfig>(config);
}
static void Main(string[] args)
{
    // Collect all artifacts next to the executable under "Benchmark.Results".
    string artifactsPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Benchmark.Results");

    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .WithArtifactsPath(artifactsPath);

    BenchmarkRunner.Run<BenchmarkGridScenario>(config);
}
static void Main(string[] args)
{
#if RELEASE
    // Release builds run the benchmarks with allocation tracking enabled.
    var config = ManualConfig.Create(DefaultConfig.Instance).With(MemoryDiagnoser.Default);
    BenchmarkRunner.Run<Benchmarks>(config);
#else
    // Debug builds exercise the script compiler instead of benchmarking.
    var result = Script.Compile(@" any end");

    foreach (var item in result.Errors)
    {
        Console.WriteLine(item);
    }

    // Stop early if compilation produced hard errors.
    if (result.Errors.ContainsErrors)
    {
        Console.ReadKey(true);
        return;
    }

    result.Script.Run(new Machine());
    Console.ReadKey(true);
#endif
}
static void Main(string[] args)
{
    // Run on .NET Core with an extended iteration count for tighter statistics.
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(Job.Core.WithIterationCount(150));

    BenchmarkRunner.Run<Benchmark>(config);
}
public static void Main(string[] args)
{
    // All suites share one RyuJIT x64 config; JoinSummary merges their
    // results into a single combined table.
    ManualConfig config = (ManualConfig)ManualConfig.Create(DefaultConfig.Instance).With(Job.RyuJitX64);
    config.Options |= ConfigOptions.JoinSummary;

    var runInfos = new BenchmarkRunInfo[]
    {
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarkSum), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksAverage), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksFirst), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksLast), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksMax), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksMin), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksAggregate), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksRepeat), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarkOrderBy), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarkSequenceEqual), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarkSelect), config),
        BenchmarkConverter.TypeToBenchmarks(typeof(BenchmarksWhereAggregate), config),
    };

    Summary[] summaries = BenchmarkRunner.Run(runInfos);

    Console.WriteLine("Press enter to close");
    Console.ReadLine();
}
private static void RunPerformanceBenchmarks()
{
    // Compare deserialization performance on the full CLR and .NET Core.
    var config = ManualConfig.Create(DefaultConfig.Instance)
        .With(Job.Clr)
        .With(Job.Core);

    BenchmarkRunner.Run<DeserilaizationBenchmark>(config);
}
static void Main(string[] args)
{
    // Benchmarks the activation functions under both the legacy and RyuJIT
    // x64 JITs on the full CLR, exporting a csv without units in the cells.
    var config = ManualConfig.Create(DefaultConfig.Instance);

    // csv export: units only in the header, times in microseconds, sizes in KB,
    // so the numeric cells stay machine-readable.
    config.Add(new CsvExporter(CsvSeparator.CurrentCulture,
        new BenchmarkDotNet.Reports.SummaryStyle
        {
            PrintUnitsInHeader = true,
            PrintUnitsInContent = false,
            TimeUnit = TimeUnit.Microsecond,
            SizeUnit = BenchmarkDotNet.Columns.SizeUnit.KB
        }));

    // Legacy JIT x64: single short throughput run with outliers removed;
    // very large objects are allowed on the GC heap.
    config.Add(new Job(EnvMode.LegacyJitX64, EnvMode.Clr, RunMode.Short)
    {
        Env = { Runtime = Runtime.Clr, Platform = Platform.X64 },
        Run = { LaunchCount = 1, WarmupCount = 1, TargetCount = 1, RunStrategy = BenchmarkDotNet.Engines.RunStrategy.Throughput },
        Accuracy = { RemoveOutliers = true }
    }.WithGcAllowVeryLargeObjects(true));

    // Identical settings under RyuJIT x64 for a direct JIT comparison.
    config.Add(new Job(EnvMode.RyuJitX64, EnvMode.Clr, RunMode.Short)
    {
        Env = { Runtime = Runtime.Clr, Platform = Platform.X64 },
        Run = { LaunchCount = 1, WarmupCount = 1, TargetCount = 1, RunStrategy = BenchmarkDotNet.Engines.RunStrategy.Throughput },
        Accuracy = { RemoveOutliers = true }
    }.WithGcAllowVeryLargeObjects(true));

    config.Add(BenchmarkDotNet.Loggers.ConsoleLogger.Default);

    // Do not fail when benchmarking a non-optimized (e.g. Debug) build.
    config.Add(JitOptimizationsValidator.DontFailOnError);

    var summary = BenchmarkRunner.Run<ActivationFunctions>(config);
}
static void Main(string[] args)
{
    // One invocation per iteration (no unrolling) with a long warmup and a
    // configurable measured-iteration count; outliers are kept.
    var job = Job.Default
        .WithLaunchCount(1)
        .WithWarmupCount(100)
        //.WithIterationTime(new TimeInterval(1, TimeUnit.Second))
        .WithUnrollFactor(1)
        .WithInvocationCount(1)
        .WithTargetCount(Params.Instance.Value.TargetCount)
        .WithRemoveOutliers(false);

    // Extended statistics: min/median/p95/max plus a 99.9% confidence interval.
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(StatisticColumn.Min)
        .With(StatisticColumn.Median)
        .With(StatisticColumn.P95)
        .With(StatisticColumn.Max)
        .With(StatisticColumn.CiLower(ConfidenceLevel.L999))
        .With(StatisticColumn.CiUpper(ConfidenceLevel.L999))
        .With(job);

    // Optionally restrict the run to single-operation benchmarks only.
    if (Params.Instance.Value.SingleOperationsOnly)
    {
        config = config.With(new CategoryFilter("SingleOperation"));
    }

    BenchmarkRunner.Run<Thin.GetBenchmark>(config);
    BenchmarkRunner.Run<Thin.PutBenchmark>(config);
    BenchmarkRunner.Run<Thick.GetBenchmark>(config);
    BenchmarkRunner.Run<Thick.PutBenchmark>(config);
}
public static async Task Main()
{
    // Configure Serilog: console sink, EF Core command logging at Information,
    // framework noise suppressed (System -> Error, Microsoft -> Warning), and
    // exception details — including DbUpdateException internals — destructured
    // under a root "Exception" property.
    var logger = new LoggerConfiguration()
        .Enrich.FromLogContext()
        .MinimumLevel.Override("System", LogEventLevel.Error)
        .MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
        .MinimumLevel.Override("Microsoft.EntityFrameworkCore.Database.Command", LogEventLevel.Information)
        .Enrich.WithExceptionDetails(new DestructuringOptionsBuilder()
            .WithDefaultDestructurers()
            .WithRootName("Exception")
            .WithDestructurers(new[] { new DbUpdateExceptionDestructurer() }))
        .WriteTo.Console(theme: AnsiConsoleTheme.Code);

    // Publish the logger globally and wrap it for consumers that expect
    // a Microsoft.Extensions.Logging ILoggerFactory.
    Log.Logger = logger.CreateLogger();
    loggerFactory = new SerilogLoggerFactory(Log.Logger);

    // Benchmark config: csv export (units in header only, sizes KB, times ms)
    // and a short in-process run — prepared but only used by the commented-out
    // PureBenchmark invocation below.
    var config = ManualConfig.Create(DefaultConfig.Instance);
    config.Add(new CsvExporter(
        CsvSeparator.CurrentCulture,
        new SummaryStyle(true, SizeUnit.KB, TimeUnit.Millisecond, false, false, 20)));
    config.Add(Job.ShortRun.With(InProcessEmitToolchain.Instance));

    // var summary = BenchmarkRunner.Run<PureBenchmark>(config);

    try
    {
        await DatabaseTest();
    }
    catch (Exception e)
    {
        // Log fatally and rethrow so the process exit code reflects the failure.
        Log.Fatal(e, "ERROR");
        throw;
    }
}
public static void Main()
{
    // Optimization validation is disabled so the benchmark can also run
    // against assemblies built without optimizations (e.g. Debug builds).
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .WithOptions(ConfigOptions.DisableOptimizationsValidator);

    BenchmarkRunner.Run<Benchmark>(config);
}
static void Main(string[] args)
{
    // Builds a config that runs a suite on both .NET Core 2.0 and the current
    // classic .NET Framework with server GC, exporting HTML and R plots.
    IConfig BuildConfig(RunStrategy strategy) =>
        ManualConfig.Create(DefaultConfig.Instance)
            .With(Job.Default.With(CsProjCoreToolchain.NetCoreApp20).With(strategy).WithGcServer(true))
            .With(Job.Default.With(CsProjClassicNetToolchain.Current.Value).With(strategy).WithGcServer(true))
            .With(DefaultExporters.Html)
            .With(DefaultExporters.RPlot);

    // No arguments means "run every suite".
    if (!args.Any())
    {
        args = new[] { "-micro_nofill", "-micro_fill", "-macro_nofill", "-macro_fill" };
    }

    // Micro suites use throughput mode; macro suites use monitoring mode.
    if (args.Contains("-micro_nofill"))
    {
        BenchmarkRunner.Run<MicroTestsNoFill>(BuildConfig(RunStrategy.Throughput));
    }

    if (args.Contains("-micro_fill"))
    {
        BenchmarkRunner.Run<MicroTestsFill>(BuildConfig(RunStrategy.Throughput));
    }

    if (args.Contains("-macro_nofill"))
    {
        BenchmarkRunner.Run<MacroTestsNoFill>(BuildConfig(RunStrategy.Monitoring));
    }

    if (args.Contains("-macro_fill"))
    {
        BenchmarkRunner.Run<MacroTestsFill>(BuildConfig(RunStrategy.Monitoring));
    }
}
public static void Main()
{
    // Force the native MKL provider and report which providers are active.
    Providers.ForceNativeMKL();
    Console.WriteLine("Linear Algebra: " + Control.LinearAlgebraProvider);
    Console.WriteLine("FFT: " + Control.FourierTransformProvider);

    // Sanity-check the dense matrix product implementation before benchmarking.
    var subject = new LinearAlgebra.DenseMatrixProduct();
    subject.Verify();
    Console.WriteLine("Verified.");

    // Compare RyuJIT x64 against the legacy x86 JIT.
    var config = ManualConfig.Create(DefaultConfig.Instance);
    config.Add(Job.RyuJitX64, Job.LegacyJitX86);
    //config.Add(new MemoryDiagnoser());

    BenchmarkRunner.Run<Transforms.FFT>(config);
    //BenchmarkRunner.Run<LinearAlgebra.DenseMatrixProduct>(config);
}
static void Main(string[] args)
{
    // Track memory allocations while benchmarking the duplicate finder.
    var config = ManualConfig.Create(DefaultConfig.Instance);
    config.Add(MemoryDiagnoser.Default);

    BenchmarkRunner.Run<DuplicateFindBench>(config);
}
static void Main()
{
    BenchmarkRunner.Run<ParseDouble>();
    BenchmarkRunner.Run<ParseDateTime>();

    // The file-parsing benchmark additionally reports memory allocations.
    var fileConfig = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(new MemoryDiagnoser());
    BenchmarkRunner.Run<ParseFile>(fileConfig);
}
static void Main()
{
    // Disable the forced GC.Collect between iterations: a single iteration
    // consists of millions of invocations, and a forced collection would skew
    // the allocation-heavy Task<T> benchmarks.
    var noForcedCollections = new GcMode { Force = false };

    var customConfig = ManualConfig
        .Create(DefaultConfig.Instance)                 // keep default exporters/loggers
        .With(JitOptimizationsValidator.FailOnError)    // refuse to run non-Release builds
        .With(MemoryDiagnoser.Default)                  // report allocations
        .With(StatisticColumn.OperationsPerSecond)      // add an ops/s column
        .With(Job.Default.With(noForcedCollections));

#if NET462
    // The Windows-only inlining diagnoser can be enabled to find out what does
    // not get inlined; it produces a lot of output, so it stays commented out.
    //customConfig = customConfig.With(new BenchmarkDotNet.Diagnostics.Windows.InliningDiagnoser(logFailuresOnly: true, filterByNamespace: true));
#endif

    var summary = BenchmarkRunner.Run<PerformanceTests>(customConfig);
    Console.WriteLine(summary);
}
static void Main(string[] args)
{
    List<Type> benchmarks = GetBenchmarkTypesFromArgs(args);

    // Set the argument as the crash dump. We can't just set CrashDump here
    // because it needs to be read from child benchmark processes.
    if (args.Length > 0)
    {
        Environment.SetEnvironmentVariable(DumpFileEnv, args[0]);
    }

    // We want to run this even if we don't use the result, to make sure we
    // can successfully load 'CrashDump'.
    int targetPointerSize = GetTargetPointerSize();

    ManualConfig benchmarkConfiguration = ManualConfig.Create(DefaultConfig.Instance);

    // Windows supports x86 and x64, so we need to choose the matching
    // .NET Core 3.1 toolchain; other platforms use the default job.
    Job job;
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        string dotnetPath = GetDotnetPath(targetPointerSize);
        if (targetPointerSize == 4)
        {
            job = Job.RyuJitX86.With(CsProjCoreToolchain.From(NetCoreAppSettings.NetCoreApp31.WithCustomDotNetCliPath(dotnetPath)));
        }
        else
        {
            job = Job.RyuJitX64.With(CsProjCoreToolchain.From(NetCoreAppSettings.NetCoreApp31.WithCustomDotNetCliPath(dotnetPath)));
        }
    }
    else
    {
        job = Job.Default;
    }

    // Job id records OS, framework and target bitness so summaries are
    // self-describing when results from different machines are compared.
    string id = $"{RuntimeInformation.OSDescription} {RuntimeInformation.FrameworkDescription} {(targetPointerSize == 4 ? "32bit" : "64bit")}";
    job = job.WithId(id)
        .WithWarmupCount(1)
        .WithIterationTime(TimeInterval.FromSeconds(1))
        .WithMinIterationCount(5)
        .WithMaxIterationCount(10)
        .DontEnforcePowerPlan(); // make sure BDN does not try to enforce High Performance power plan on Windows

    benchmarkConfiguration.Add(job);

    // No explicit benchmark types: run everything in the calling assembly;
    // otherwise run only the requested types.
    if (benchmarks.Count == 0)
    {
        BenchmarkRunner.Run(Assembly.GetCallingAssembly(), benchmarkConfiguration);
    }
    else
    {
        foreach (Type t in benchmarks)
        {
            BenchmarkRunner.Run(t, benchmarkConfiguration);
        }
    }
}
static void Main(string[] args)
{
    // Benchmark operator performance under RyuJIT x64.
    var config = ManualConfig.Create(DefaultConfig.Instance).With(Job.RyuJitX64);
    BenchmarkRunner.Run<Operators>(config);
}
public void StartTest()
{
    // The abstract base class itself has nothing to benchmark — pass through.
    if (this.GetType() == typeof(PerformanceTest))
    {
        Assert.Pass();
    }

#if DEBUG
    // Debug builds run in-process (so the debugger can attach) and warn that
    // the numbers are not representative.
    Assert.Warn("Debug mode is on\n");
    var summary = BenchmarkRunner.Run(this.GetType(),
        new DebugInProcessConfig()
            .AddExporter(new CsvExporter(CsvSeparator.Comma))
            .WithOptions(ConfigOptions.StopOnFirstError | ConfigOptions.DisableLogFile)
        );
#else
    var summary = BenchmarkRunner.Run(this.GetType(),
        ManualConfig
            .Create(DefaultConfig.Instance)
            .WithOptions(ConfigOptions.StopOnFirstError | ConfigOptions.DisableLogFile)
        );
#endif

    // Fail the test run on any validation error, missing report or failed benchmark.
    Assert.IsFalse(summary.HasCriticalValidationErrors, "Critical validation error");
    Assert.IsTrue(summary.ValidationErrors.IsEmpty, "Not critical validation errors");
    Assert.IsFalse(summary.Reports.IsEmpty, "Here is no test results... Are you alright?");
    Assert.IsTrue(summary.Reports.All(r => r.Success), "One of benchmarks has failed");
}
public void CustomTargetPlatformJobsAreGroupedByTargetFrameworkMoniker()
{
    // Builds a config whose single job targets "net5.0-windows" through a
    // freshly created toolchain instance, so toolchain instance identity
    // cannot be what drives the grouping.
    ManualConfig CreateNet5WindowsConfig() => ManualConfig.Create(DefaultConfig.Instance)
        .AddJob(Job.Default.WithToolchain(CsProjCoreToolchain.From(new Toolchains.DotNetCli.NetCoreAppSettings(
            targetFrameworkMoniker: "net5.0-windows",
            runtimeFrameworkVersion: null,
            name: ".NET 5.0"))));

    var net5Config = ManualConfig.Create(DefaultConfig.Instance)
        .AddJob(Job.Default.WithToolchain(CsProjCoreToolchain.NetCoreApp50));

    // Two DISTINCT toolchain instances with identical settings.
    var net5WindowsConfig1 = CreateNet5WindowsConfig();
    var net5WindowsConfig2 = CreateNet5WindowsConfig();

    var benchmarksNet5 = BenchmarkConverter.TypeToBenchmarks(typeof(Plain1), net5Config);
    var benchmarksNet5Windows1 = BenchmarkConverter.TypeToBenchmarks(typeof(Plain2), net5WindowsConfig1);
    var benchmarksNet5Windows2 = BenchmarkConverter.TypeToBenchmarks(typeof(Plain3), net5WindowsConfig2);

    // Group every case by its runtime properties.
    var grouped = benchmarksNet5.BenchmarksCases
        .Union(benchmarksNet5Windows1.BenchmarksCases)
        .Union(benchmarksNet5Windows2.BenchmarksCases)
        .GroupBy(benchmark => benchmark, new BenchmarkPartitioner.BenchmarkRuntimePropertiesComparer())
        .ToArray();

    // Exactly two groups: one per target framework moniker.
    Assert.Equal(2, grouped.Length);
    Assert.Single(grouped, group => group.Count() == 3); // Plain1 (3 methods) runing against "net5.0"
    Assert.Single(grouped, group => group.Count() == 6); // Plain2 (3 methods) and Plain3 (3 methods) runing against "net5.0-windows"
}
public static void Main(string[] args)
{
    // Shared configuration: custom markdown export, a broad statistics set,
    // allocation tracking, fail-fast validation, and jobs for both runtimes.
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(new CustomMarkdownExporter())
        .With(StatisticColumn.Max)
        .With(StatisticColumn.Min)
        .With(StatisticColumn.Mean)
        .With(StatisticColumn.Median)
        .With(StatisticColumn.OperationsPerSecond)
        .With(MemoryDiagnoser.Default)
        .With(ExecutionValidator.FailOnError)
        .With(Job.Default.With(Runtime.Core))
        .With(Job.Default.With(Runtime.Clr));

    // Every suite runs against the same configuration.
    var benchmarkTypes = new[]
    {
        typeof(Core.DocumentSearchBenchmarks),
        typeof(Solr4.Search.Result.DocumentResultBenchmarks),
        typeof(Solr4.Search.Result.FacetsResultBenchmarks),
        typeof(Solr5.Search.Result.DocumentResultBenchmarks),
        typeof(Solr5.Search.Result.FacetsResultBenchmarks),
    };

    foreach (var benchmarkType in benchmarkTypes)
    {
        BenchmarkRunner.Run(benchmarkType, config);
    }

    // Keep the console window open until a key is pressed.
    Console.Read();
}
public static void Main(string[] args)
{
    Console.WriteLine("Initiating benchmarking");

    var config = ManualConfig
        .Create(DefaultConfig.Instance);

    // Keep units out of the csv cells so the numbers import cleanly.
    var summaryStyle = new SummaryStyle()
    {
        PrintUnitsInHeader = true,
        PrintUnitsInContent = false,
        TimeUnit = BenchmarkDotNet.Horology.TimeUnit.Millisecond,
    };
    config.Add(new CsvExporter(CsvSeparator.CurrentCulture, summaryStyle));

    // Remove outliers: experiments showed other processes on the system create
    // outliers more often than abnormal data access patterns do.
    config.Add(Job.Default.WithRemoveOutliers(true));

    var benchmarkTypes = new[] { typeof(GetFileBenchmark), typeof(FileExistsBenchmark) };
    var switcher = new BenchmarkSwitcher(benchmarkTypes);
    switcher.Run(args, config);

    Console.WriteLine("Benchmarking complete");
}
static void Main(string[] args)
{
    var run = Job.MediumRun; // swap in Job.ShortRun while iterating locally

    // RyuJIT jobs for each supported .NET Core release.
    Job jobCore21 = run.With(Jit.RyuJit).With(CoreRuntime.Core21).WithId($"Core 2.1");
    Job jobCore31 = run.With(Jit.RyuJit).With(CoreRuntime.Core31).WithId($"Core 3.1");
    Job jobCore50 = run.With(Jit.RyuJit).With(CoreRuntime.Core50).WithId($"Core 5.0");

    IConfig config = ManualConfig.Create(DefaultConfig.Instance);
    config = config.With(new[] { jobCore21, jobCore31, jobCore50 });

    // Platform-specific runtimes: full framework on Windows, Mono elsewhere.
    MonoRuntime monoRuntime = MonoRuntime.Default;
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        config = config.With(run.With(ClrRuntime.Net48).WithId("FW 4.8"));
    }
    else
    {
        config = config.With(new[] { run.With(Jit.Llvm).With(monoRuntime).WithId("Mono+LLVM") });
        config = config.With(new[] { run.With(monoRuntime).WithId("Mono legacy") });
    }

    // Exporters: JSON (full and brief), markdown, html and csv.
    config = config
        .With(JsonExporter.Custom(fileNameSuffix: "-full", indentJson: true, excludeMeasurements: false))
        .With(JsonExporter.Custom(fileNameSuffix: "-brief", indentJson: true, excludeMeasurements: true))
        .With(MarkdownExporter.Default)
        .With(HtmlExporter.Default)
        .With(CsvExporter.Default);

    // Run every benchmark type in this assembly.
    var summary = BenchmarkRunner.Run(typeof(Program).Assembly, config);
}
public static void Run()
{
    // Set the output encoding through the fluent configuration API.
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(Encoding.Unicode);

    BenchmarkRunner.Run<IntroEncodingFluentConfig>(config);
}
private static IConfig CreateConfig()
{
    // Runtime is used as the job selector later purely to stay close to the
    // initial issue — nothing is otherwise special about it. Dry run mode
    // keeps the reproduction fast.
    Job coreJob = new Job(Job.Default).With(Runtime.Core).ApplyAndFreeze(RunMode.Dry);
    Job clrJob = new Job(Job.Default).With(Runtime.Clr).ApplyAndFreeze(RunMode.Dry);

    var config = ManualConfig.Create(DefaultConfig.Instance);
    return(config.With(coreJob).With(clrJob));
}
public void TextColumnIsLeftJustified()
{
    // Arrange: a summary containing a single custom text ("Param") column.
    var config = ManualConfig.Create(DefaultConfig.Instance).With(new ParamColumn("Param"));
    var summary = MockFactory.CreateSummary(config);

    // Act
    var table = new SummaryTable(summary);
    var paramColumn = table.Columns.First(c => c.Header == "Param");

    // Assert: text columns must be rendered left-justified.
    Assert.Equal(SummaryTable.SummaryTableColumn.TextJustification.Left, paramColumn.Justify);
}