// Runs the hardcoded benchmark classes in-process; returns true on success.
// NOTE: most of this manually clones pieces of DebugInProcessConfig to work
// around bugs in BenchmarkDotNet configuration.
static bool Run()
{
    var succeeded = false;
    try
    {
        var logger = new Logger();
        var template = new DebugInProcessConfig();
        var config = new ManualConfig();

        // Copy each configurable piece from the debug template one by one.
        foreach (var exporter in template.GetExporters())
        {
            config.AddExporter(exporter);
        }
        foreach (var diagnoser in template.GetDiagnosers())
        {
            config.AddDiagnoser(diagnoser);
        }
        foreach (var analyser in template.GetAnalysers())
        {
            config.AddAnalyser(analyser);
        }
        foreach (var validator in template.GetValidators())
        {
            config.AddValidator(validator);
        }
        foreach (var provider in template.GetColumnProviders())
        {
            config.AddColumnProvider(provider);
        }

        config.AddJob(JobMode<Job>.Default.WithToolchain(
            new InProcessEmitToolchain(TimeSpan.FromMinutes(10), logOutput: true)));
        config.UnionRule = ConfigUnionRule.AlwaysUseGlobal; // Overriding the default
        config.AddLogger(logger);

        // ImageBenchmark class is hardcoded here for now
        BenchmarkRunner.Run<ImageBenchmark>(config.WithOptions(ConfigOptions.DisableLogFile));
        BenchmarkRunner.Run<ViewHandlerBenchmark>(config.WithOptions(ConfigOptions.DisableLogFile));
        succeeded = true;
    }
    catch (Exception ex)
    {
        Log.Error(Tag, $"Error: {ex}");
    }
    return succeeded;
}
// Checks that MemoryDiagnoser's Gen0 column reports "-" for the
// ListStructEnumerator benchmarks and a positive number for the
// ListObjectEnumerator ones.
public void MemoryDiagnoserTracksHeapMemoryAllocation()
{
    var benchmarks = BenchmarkConverter.TypeToBenchmarks(typeof(ListEnumeratorsBenchmarks));
    var diagnoser = new Diagnostics.Windows.MemoryDiagnoser();
    var summary = BenchmarkRunner.Run(
        benchmarks,
        ManualConfig.CreateEmpty()
            .With(Job.Dry.With(Runtime.Core).With(Jit.Host).With(Mode.Throughput).WithWarmupCount(1).WithTargetCount(1))
            .With(DefaultConfig.Instance.GetLoggers().ToArray())
            .With(DefaultConfig.Instance.GetColumns().ToArray())
            .With(diagnoser)
            .With(new OutputLogger(output)));

    var gcColumns = diagnoser.GetColumns
        .OfType<Diagnostics.Windows.MemoryDiagnoser.GCCollectionColumn>()
        .ToArray();
    const int gen0Index = 0;

    var structBenchmarks = benchmarks.Where(b => b.ShortInfo.Contains("ListStructEnumerator"));
    var objectBenchmarks = benchmarks.Where(b => b.ShortInfo.Contains("ListObjectEnumerator"));

    // Struct enumerator cases are expected to show no Gen0 collections at all.
    foreach (var benchmark in structBenchmarks)
    {
        var gen0Collections = gcColumns[gen0Index].GetValue(summary, benchmark);
        Assert.Equal("-", gen0Collections);
    }

    // Object enumerator cases are expected to show a positive Gen0 count.
    foreach (var benchmark in objectBenchmarks)
    {
        var gen0Str = gcColumns[gen0Index].GetValue(summary, benchmark);
        double gen0Value;
        if (double.TryParse(gen0Str, NumberStyles.Number, HostEnvironmentInfo.MainCultureInfo, out gen0Value))
        {
            Assert.True(gen0Value > 0);
        }
        else
        {
            Assert.True(false, $"Can't parse '{gen0Str}'");
        }
    }
}
// Interactive entry point: lets the user pick an image-resizing benchmark
// (or a plain one-shot run) from a console menu.
public static void Main(string[] args)
{
    var config = ManualConfig.CreateEmpty()
        .With(Job.Default.With(CsProjCoreToolchain.NetCoreApp11))
        .With(DefaultConfig.Instance.GetLoggers().ToArray())
        .With(DefaultConfig.Instance.GetColumnProviders().ToArray())
        .With(MemoryDiagnoser.Default);

    Console.WriteLine(@"Choose an image resizing benchmarks: 0. Just run ""Load, Resize, Save"" once, don't benchmark 1. Resize 2. Load, resize, save 3. Load, resize, save in parallel ");

    switch (Console.ReadKey().Key)
    {
        case ConsoleKey.D0:
            // Single pass through every library, no measurement.
            var oneShot = new LoadResizeSave();
            oneShot.ImageSharpBenchmark();
            oneShot.MagickResizeBenchmark();
            oneShot.FreeImageResizeBenchmark();
            oneShot.MagicScalerResizeBenchmark();
            oneShot.SkiaBitmapLoadResizeSaveBenchmark();
            oneShot.SkiaCanvasLoadResizeSaveBenchmark();
            break;
        case ConsoleKey.D1:
            BenchmarkRunner.Run<Resize>(config);
            break;
        case ConsoleKey.D2:
            BenchmarkRunner.Run<LoadResizeSave>(config);
            break;
        case ConsoleKey.D3:
            BenchmarkRunner.Run<LoadResizeSaveParallel>(config);
            break;
        default:
            Console.WriteLine("Unrecognized command.");
            break;
    }
}
// Entry point. With ORIGINAL defined this does a quick manual sanity run of
// the Sequence/Slice extension helpers and one RyuJIT-x64 benchmark; otherwise
// it defers to BenchmarkSwitcher so benchmarks can be selected from the CLI.
// (Removed several large blocks of commented-out [Benchmark] methods —
// dead code that belongs in source history, not the file.)
public static void Main(string[] args)
{
#if ORIGINAL
    int[] a = { 1, 2, 3, 4, 5, 6, 7, 8 };
    int[] b = { 1, 0, 4, 4, 6, 7, 2, 10 };
    var r = a.SequenceCompareS(b);
    a.Slice(2, 4).SumF();
    foreach (var i in r)
    {
        Console.Write(i + ",");
    }
    var summary = BenchmarkRunner.Run<Benchmarks>(ManualConfig.Create(DefaultConfig.Instance).With(Job.RyuJitX64));
    Console.ReadLine();
#else
    BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args);
#endif
}
// Whichever direction two configs with different build timeouts are merged,
// the longer timeout must end up in the immutable result.
public void WhenTwoCustomTimeoutsAreProvidedTheLongerOneIsUsed(bool direction)
{
    var shorter = ManualConfig.CreateEmpty().WithBuildTimeout(TimeSpan.FromSeconds(1));
    var longer = ManualConfig.CreateEmpty().WithBuildTimeout(TimeSpan.FromSeconds(2));

    if (direction)
    {
        shorter.Add(longer);
    }
    else
    {
        longer.Add(shorter);
    }

    var merged = ImmutableConfigBuilder.Create(direction ? shorter : longer);

    Assert.Equal(TimeSpan.FromSeconds(2), merged.BuildTimeout);
}
// Debug builds exercise every scenario once (easy to step through in a
// debugger); release builds hand the class to BenchmarkDotNet for real runs.
public static void Run()
{
#if DEBUG
    var scenarios = new CachedFunctionWithEnumerableKeys();
    scenarios.OneHit();
    scenarios.OneMiss();
    scenarios.OneHitAndOneMiss();
    scenarios.OneHundredHits();
    scenarios.OneHundredMisses();
    scenarios.OneHundredHitsAndOneHundredMisses();
#else
    BenchmarkRunner.Run<CachedFunctionWithEnumerableKeys>(ManualConfig
        .Create(DefaultConfig.Instance)
        .With(Job.MediumRun.WithLaunchCount(1))
        .With(MemoryDiagnoser.Default));
#endif
}
/// <summary>
/// https://github.com/dotnet/BenchmarkDotNet/blob/307b7250210fc3a7b7ff2c454bb79f793aae57ea/docs/guide/Configs/Jobs.md
/// </summary>
public QuickRunJobAttribute()
{
    // A short x64 RyuJIT .NET Core job for fast feedback loops.
    var quickRun = Job.Dry
        .With(Platform.X64)
        .With(Jit.RyuJit)
        .With(Runtime.Core)
        .WithWarmupCount(5)
        .WithLaunchCount(1)
        .WithTargetCount(5)
        .WithId("QuickRun");

    var config = ManualConfig.CreateEmpty();
    config.Add(RPlotExporter.Default);
    config.Add(MarkdownExporter.GitHub);

    Config = config.With(quickRun);
}
// Entry point: builds a config that mirrors DefaultConfig but adds memory
// diagnostics, an in-process job, alphabetical ordering and extra exporters,
// then lets BenchmarkSwitcher pick the benchmark class from the CLI args.
public static void Main(string[] args)
{
    var defaults = DefaultConfig.Instance;

    var config = new ManualConfig()
        .AddColumnProvider(defaults.GetColumnProviders().ToArray())
        .AddLogger(defaults.GetLoggers().ToArray())
        .AddAnalyser(defaults.GetAnalysers().ToArray())
        .AddValidator(defaults.GetValidators().ToArray())
        .WithUnionRule(defaults.UnionRule)
        .WithSummaryStyle(defaults.SummaryStyle)
        .WithArtifactsPath(defaults.ArtifactsPath)
        .AddDiagnoser(MemoryDiagnoser.Default)
        .AddJob(Job.Default.WithToolchain(InProcessEmitToolchain.Instance))
        .WithOrderer(new DefaultOrderer(SummaryOrderPolicy.Method, MethodOrderPolicy.Alphabetical))
        .AddExporter(MarkdownExporter.GitHub, JsonExporter.FullCompressed);

    BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args, config);
}
// Builds a minimal single-report Summary containing one time metric,
// configured with the requested unit-printing style — used as a fixture
// for exporter/column formatting tests.
private static Summary CreateMockSummary(bool printUnitsInContent, bool printUnitsInHeader, TimeUnit timeUnit, double metricValue)
{
    var style = new SummaryStyle(TestCultureInfo.Instance, printUnitsInHeader, null, timeUnit, printUnitsInContent);
    var config = new ManualConfig().WithSummaryStyle(style);

    var benchmarkCase = new BenchmarkCase(
        new Descriptor(null, null),
        Job.Dry,
        new ParameterInstances(ImmutableArray<ParameterInstance>.Empty),
        ImmutableConfigBuilder.Create(config));

    var timeMetric = new Metric(LocalMetricDescriptor.TimeInstance, metricValue);
    var report = new BenchmarkReport(true, benchmarkCase, null, null, null, new List<Metric>() { timeMetric });

    return new Summary(
        "",
        new[] { report }.ToImmutableArray(),
        HostEnvironmentInfo.GetCurrent(),
        "",
        "",
        TimeSpan.Zero,
        CultureInfo.InvariantCulture,
        ImmutableArray<ValidationError>.Empty,
        ImmutableArray<IColumnHidingRule>.Empty);
}
// The repeated With(StatisticColumn.Mean) calls are deliberate: the test
// asserts that duplicate column registrations collapse to a single column
// per unique Id in the final summary.
public void UniqueIdTest()
{
    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .With(StatisticColumn.Mean)
        .With(StatisticColumn.Mean)
        .With(StatisticColumn.StdDev)
        .With(StatisticColumn.Mean)
        .With(StatisticColumn.Mean)
        .With(StatisticColumn.P67);

    var summary = CreateSummary(config);
    var columns = summary.GetColumns();

    Assert.Equal(1, columns.Count(c => c.Id == StatisticColumn.Mean.Id));
    Assert.Equal(1, columns.Count(c => c.Id == StatisticColumn.StdDev.Id));
    Assert.Equal(1, columns.Count(c => c.Id == StatisticColumn.P67.Id));
}
// Entry point: runs the MemoryTests benchmarks with a memory diagnoser and a
// CSV exporter using millisecond/KB units.
// Fix: the original called config.Add(new MemoryDiagnoser()) twice, once
// before and once after the exporter, registering a duplicate diagnoser.
static void Main(string[] args)
{
    var config = ManualConfig.Create(DefaultConfig.Instance);
    config.Add(new MemoryDiagnoser());
    config.Add(new CsvExporter(
        CsvSeparator.CurrentCulture,
        new BenchmarkDotNet.Reports.SummaryStyle
        {
            PrintUnitsInHeader = true,
            PrintUnitsInContent = false,
            TimeUnit = BenchmarkDotNet.Horology.TimeUnit.Millisecond,
            SizeUnit = BenchmarkDotNet.Columns.SizeUnit.KB
        }
    ));
    var summary = BenchmarkRunner.Run<MemoryTests>(config);
}
// Merges the given (or default) config with every IConfigSource attribute
// found on the type and its assembly, applying mutator-job configs last so
// they can modify the jobs contributed by the others.
public static ReadOnlyConfig GetFullConfig(Type type, IConfig config)
{
    config = config ?? DefaultConfig.Instance;

    if (type != null)
    {
        var fromType = type.GetTypeInfo().GetCustomAttributes(true).OfType<IConfigSource>();
        var fromAssembly = type.GetTypeInfo().Assembly.GetCustomAttributes().OfType<IConfigSource>();

        // configs with mutators must be the ones applied at the end
        var ordered = fromType
            .Concat(fromAssembly)
            .Select(attribute => attribute.Config)
            .OrderBy(c => c.GetJobs().Count(job => job.Meta.IsMutator));

        foreach (var attributeConfig in ordered)
        {
            config = ManualConfig.Union(config, attributeConfig);
        }
    }

    return config.AsReadOnly();
}
// Entry point: runs Benchmark on both .NET Core and full CLR with GitHub
// markdown output and memory diagnostics. The commented lines are optional
// knobs kept for quick experimentation.
private static void Main(string[] args)
{
    var config = ManualConfig.Create(DefaultConfig.Instance)
        //.With(RPlotExporter.Default)
        .With(MarkdownExporter.GitHub)
        .With(MemoryDiagnoser.Default)
        //.With(StatisticColumn.Min)
        //.With(StatisticColumn.Max)
        //.With(RankColumn.Arabic)
        .With(Job.Core)
        .With(Job.Clr)
        //.With(Job.ShortRun)
        //.With(Job.ShortRun.With(BenchmarkDotNet.Environments.Platform.X64).WithWarmupCount(1).WithIterationCount(1))
        .WithArtifactsPath(null);

    BenchmarkRunner.Run<Benchmark>(config);
}
// Produces a config that mirrors DefaultConfig but adds memory diagnostics,
// alphabetical method ordering, and GitHub-markdown + compressed-JSON export.
public static IConfig GetCustomConfig()
{
    var defaults = DefaultConfig.Instance;

    return new ManualConfig()
        .AddColumnProvider(defaults.GetColumnProviders().ToArray())
        .AddLogger(defaults.GetLoggers().ToArray())
        .AddAnalyser(defaults.GetAnalysers().ToArray())
        .AddValidator(defaults.GetValidators().ToArray())
        .WithUnionRule(defaults.UnionRule)
        .WithSummaryStyle(defaults.SummaryStyle)
        .WithArtifactsPath(defaults.ArtifactsPath)
        .AddDiagnoser(MemoryDiagnoser.Default)
        .WithOrderer(new DefaultOrderer(SummaryOrderPolicy.Method, MethodOrderPolicy.Alphabetical))
        .AddExporter(MarkdownExporter.GitHub, JsonExporter.FullCompressed);
}
// Entry point: runs FunctionBenchmarks with a CSV exporter configured for
// microsecond/KB units.
static void Main(string[] args)
{
    var config = ManualConfig.Create(DefaultConfig.Instance);

    // Set up an results exporter.
    // Note. By default results files will be located in .\BenchmarkDotNet.Artifacts\results directory.
    var csvStyle = new BenchmarkDotNet.Reports.SummaryStyle
    {
        PrintUnitsInHeader = true,
        PrintUnitsInContent = false,
        TimeUnit = TimeUnit.Microsecond,
        SizeUnit = BenchmarkDotNet.Columns.SizeUnit.KB
    };
    config.Add(new CsvExporter(CsvSeparator.CurrentCulture, csvStyle));

    // Run benchmarks.
    var summary = BenchmarkRunner.Run<FunctionBenchmarks>(config);
}
// Extends the type-level config with any IConfigSource attributes declared
// directly on the benchmark method; returns the type config untouched when
// the method carries none (the most common case).
private static ImmutableConfig GetFullMethodConfig(MethodInfo method, ImmutableConfig typeConfig)
{
    var methodSources = method.GetCustomAttributes(true).OfType<IConfigSource>();

    if (!methodSources.Any()) // the most common case
    {
        return typeConfig;
    }

    var merged = ManualConfig.Create(typeConfig);
    foreach (var source in methodSources)
    {
        merged = ManualConfig.Union(merged, source.Config);
    }

    return ImmutableConfigBuilder.Create(merged);
}
// Runs the AccessEvidence benchmarks and throws if any category's 95th
// percentile exceeds its hardcoded time limit (in ns), printing the full
// report first so build logs retain the details.
// Fix: the original built a deferred LINQ query per category and then
// enumerated it up to three times (Any + Where + two Selects), re-computing
// the P95 comparison twice; results are now materialized once.
static void Main(string[] args)
{
    var report = BenchmarkRunner.Run<AccessEvidenceBenchmarks>(
        ManualConfig.Create(DefaultConfig.Instance)
            .WithOptions(ConfigOptions.DisableOptimizationsValidator));

    bool fail = false;
    List<string> failDetail = new List<string>();
    List<string> failedCategories = new List<string>();
    // Per-category P95 ceilings in nanoseconds.
    Dictionary<string, int> maxTimes = new Dictionary<string, int>()
    {
        { "Accessor-FlowData", 300 },
        { "Accessor-AsDictionary", 400 },
        { "Accessor-StringValues", 800 }
    };

    foreach (var category in maxTimes)
    {
        // Materialize once so the P95 filter is evaluated a single time.
        var failedReports = report.Reports
            .Where(r => r.BenchmarkCase.Descriptor.HasCategory(category.Key))
            .Where(r => r.ResultStatistics.Percentiles.P95 > category.Value)
            .ToList();

        if (failedReports.Count > 0)
        {
            fail = true;
            var names = string.Join(", ", failedReports.Select(s => s.BenchmarkCase.Descriptor.WorkloadMethod.Name));
            var times = string.Join(", ", failedReports.Select(s => s.ResultStatistics.Percentiles.P95.ToString("0.00")));
            failDetail.Add($"{names} were over the limit of " +
                $"{category.Value}ns at the 95th percentile. " +
                $"Actual times: {times}. See test output for full report.");
            failedCategories.Add(category.Key);
        }
    }

    if (fail)
    {
        // write out the full benchmark report.
        Console.Write(report);
        throw new Exception($"Benchmarks outside limits for the following " +
            $"categories: {string.Join(", ", failedCategories)}.\r\n\t" +
            string.Join("\r\n\t", failDetail));
    }
}
// Entry point for the XAML serialization benchmarks. "profile" as the first
// argument runs one benchmark in a tight loop for use with an external
// performance profiler; otherwise BenchmarkSwitcher runs the real suite.
public static void Main(string[] args)
{
    /** Uncomment to test using performance profiler */
    if (args?.FirstOrDefault() == "profile")
    {
        //var benchmark = new LoadSimpleBenchmark();
        var benchmark = new LoadComplexBenchmark();
        //var benchmark = new SaveSimpleBenchmark();
        //var benchmark = new SaveComplexBenchmark();
        for (int i = 0; i < 1000; i++)
        {
            benchmark.PortableXaml();
            //benchmark.PortableXamlNoCache();
            //benchmark.SystemXaml();
            //benchmark.SystemXamlNoCache();
        }
        return;
    }
    /**/

    // BenchmarkSwitcher doesn't automatically exclude abstract benchmark classes
    var benchmarkTypes = typeof(MainClass)
        .Assembly
        .GetExportedTypes()
        .Where(t => typeof(IXamlBenchmark).IsAssignableFrom(t) && !t.IsAbstract)
        .ToArray();

    var config = new ManualConfig();
    config.Add(DefaultConfig.Instance.GetLoggers().ToArray());
    config.Add(DefaultConfig.Instance.GetExporters().ToArray());
    config.Add(DefaultConfig.Instance.GetColumnProviders().ToArray());
    config.Add(JitOptimizationsValidator.DontFailOnError);
    config.Add(Job.Default);
    config.Add(MemoryDiagnoser.Default);
    config.Add(StatisticColumn.OperationsPerSecond);
    config.Add(RankColumn.Arabic);

    new BenchmarkSwitcher(benchmarkTypes).Run(args, config);
}
// When the benchmarked assembly has a real location on disk, the generated
// binaries path must live next to that assembly, not in some default folder.
public void TestAssemblyFilePathIsUsedWhenTheAssemblyLocationIsNotEmpty()
{
    const string programName = "testProgram";

    var fooMethod = typeof(MockFactory.MockBenchmarkClass)
        .GetTypeInfo()
        .GetMethods()
        .Single(m => m.Name == nameof(MockFactory.MockBenchmarkClass.Foo));
    var descriptor = new Descriptor(typeof(MockFactory.MockBenchmarkClass), fooMethod);
    var benchmarkCase = BenchmarkCase.Create(descriptor, Job.Default, null, ManualConfig.CreateEmpty().CreateImmutableConfig());
    var buildInfos = new[] { new BenchmarkBuildInfo(benchmarkCase, ManualConfig.CreateEmpty().CreateImmutableConfig(), 0) };

    var generator = new SteamLoadedBuildPartition("netcoreapp3.0", null, null, null);
    var partition = new BuildPartition(buildInfos, new Resolver());

    string actualPath = generator.ResolvePathForBinaries(partition, programName);

    string expectedPath = Path.Combine(Path.GetDirectoryName(partition.AssemblyLocation), programName);
    Assert.Equal(expectedPath, actualPath);
}
// Configures a single-invocation monitoring job; falls back to ColdStart
// when iteration runs are disabled in BenchmarkConfig.
// Fix: the original re-assigned job.Run.TargetCount = 1 inside the `if`
// even though it is already set to 1 unconditionally above — redundant.
public SingleRunBenchmarkJobAttribute()
{
    var job = new Job("BenchmarkJob");
    job.Env.Gc.Force = true;
    job.Run.UnrollFactor = 1;
    job.Run.InvocationCount = 1;
    job.Run.WarmupCount = 1;
    job.Run.TargetCount = 1;
    job.Run.RunStrategy = RunStrategy.Monitoring;

    if (!BenchmarkConfig.Instance.RunIterations)
    {
        job.Run.RunStrategy = RunStrategy.ColdStart;
    }

    Config = ManualConfig.CreateEmpty().With(job);
}
// Entry point: resets the book table, then runs the insert and select
// comparison benchmarks with a joined console-only summary.
static void Main(string[] args)
{
    Console.WriteLine("STARTING BENCHMARKING");

    ResetBookTable();

    var config = new ManualConfig()
        .WithOptions(ConfigOptions.JoinSummary)
        .WithOptions(ConfigOptions.DisableLogFile)
        .WithOptions(ConfigOptions.DisableOptimizationsValidator)
        .AddValidator(JitOptimizationsValidator.DontFailOnError)
        .AddLogger(ConsoleLogger.Default)
        .AddColumnProvider(DefaultColumnProviders.Instance);

    BenchmarkRunner.Run<InsertComparison>(config);
    BenchmarkRunner.Run<SelectComparison>(config);

    Console.WriteLine("FINISHING BENCHMARKING");
}
// A SummaryStyle attached via With() must round-trip all of its settings
// through the config unchanged.
public void UserCanDefineCustomSummaryStyle()
{
    var customStyle = new SummaryStyle
    (
        printUnitsInHeader: true,
        printUnitsInContent: false,
        printZeroValuesInContent: true,
        sizeUnit: SizeUnit.B,
        timeUnit: TimeUnit.Millisecond
    );

    var config = ManualConfig.CreateEmpty().With(customStyle);

    Assert.True(config.SummaryStyle.PrintUnitsInHeader);
    Assert.False(config.SummaryStyle.PrintUnitsInContent);
    Assert.True(config.SummaryStyle.PrintZeroValuesInContent);
    Assert.Equal(SizeUnit.B, config.SummaryStyle.SizeUnit);
    Assert.Equal(TimeUnit.Millisecond, config.SummaryStyle.TimeUnit);
}
// With StopOnFirstError enabled only the first failure is reported; with it
// disabled all benchmarks run and both successes and failures are counted.
public void StopOnFirstErrorIsRespected(bool value)
{
    var config = ManualConfig.CreateEmpty()
        .AddJob(Job.Dry)
        .AddDiagnoser(MemoryDiagnoser.Default) // crucial to repro the bug
        .WithOption(ConfigOptions.StopOnFirstError, value);

    var summary = CanExecute<MoreThanOneNonThrowingBenchmark>(config, fullValidation: false);

    if (value)
    {
        Assert.Equal(1, summary.Reports.Count(r => !r.Success));
    }
    else
    {
        Assert.Equal(3, summary.Reports.Count(r => r.Success));
        Assert.Equal(4, summary.Reports.Count(r => !r.Success));
    }
}
// Parses the command line into a config, runs every matching benchmark type
// (whole type or selected methods), and returns the resulting summaries.
// Displays usage and returns nothing when the options flag is present.
private IEnumerable<Summary> RunBenchmarks(string[] args)
{
    var globalChronometer = Chronometer.Start();
    var summaries = new List<Summary>();

    if (ShouldDisplayOptions(args))
    {
        DisplayOptions();
        return Enumerable.Empty<Summary>();
    }

    var config = ManualConfig.Union(DefaultConfig.Instance, ManualConfig.Parse(args));

    foreach (var typeWithMethods in typeParser.MatchingTypesWithMethods(args))
    {
        logger.WriteLineHeader("Target type: " + typeWithMethods.Type.Name);
        summaries.Add(typeWithMethods.AllMethodsInType
            ? BenchmarkRunner.Run(typeWithMethods.Type, config)
            : BenchmarkRunner.Run(typeWithMethods.Type, typeWithMethods.Methods, config));
        logger.WriteLine();
    }

    // TODO: move this logic to the RunUrl method
#if CLASSIC
    if (args.Length > 0 && (args[0].StartsWith("http://") || args[0].StartsWith("https://")))
    {
        var url = args[0];
        Uri uri = new Uri(url);
        var name = uri.IsFile ? Path.GetFileName(uri.LocalPath) : "URL";
        summaries.Add(BenchmarkRunner.RunUrl(url, config));
    }
#endif

    var clockSpan = globalChronometer.Stop();
    BenchmarkRunner.LogTotalTime(logger, clockSpan.GetTimeSpan(), "Global total time");
    return summaries;
}
// Entry point: runs SimpleCsvTester with a config cloned from the default
// plus min/max statistic columns, then waits for Enter before exiting.
// (Removed a large block of commented-out sync-vs-async stopwatch code.)
static void Main(string[] args)
{
    var configuration = new ManualConfig();
    configuration.KeepBenchmarkFiles = false;
    configuration.Add(StatisticColumn.Min);
    configuration.Add(StatisticColumn.Max);
    configuration.Add(DefaultConfig.Instance.GetColumnProviders().ToArray());
    configuration.Add(DefaultConfig.Instance.GetLoggers().ToArray());
    configuration.Add(DefaultConfig.Instance.GetDiagnosers().ToArray());
    configuration.Add(DefaultConfig.Instance.GetAnalysers().ToArray());
    configuration.Add(DefaultConfig.Instance.GetJobs().ToArray());
    configuration.Add(DefaultConfig.Instance.GetValidators().ToArray());

    BenchmarkRunner.Run<SimpleCsvTester>(configuration);

    Console.Out.Write("Hit <enter> to exit...");
    Console.In.ReadLine();
}
// Entry point: runs the JSON-RPC middleware benchmarks in-process with
// bounded iteration counts, memory diagnostics, and nanosecond/byte units.
public static void Main()
{
    var configuration = ManualConfig.CreateEmpty();

    configuration.AddJob(Job.Default
        .WithWarmupCount(1)
        .WithIterationTime(TimeInterval.FromMilliseconds(250))
        .WithMinIterationCount(15)
        .WithMaxIterationCount(20)
        .WithToolchain(InProcessEmitToolchain.Instance));
    configuration.AddDiagnoser(MemoryDiagnoser.Default);
    configuration.AddColumnProvider(DefaultConfig.Instance.GetColumnProviders().ToArray());
    configuration.AddLogger(ConsoleLogger.Default);
    configuration.AddExporter(new SimpleBenchmarkExporter());
    configuration.SummaryStyle = SummaryStyle.Default
        .WithTimeUnit(TimeUnit.Nanosecond)
        .WithSizeUnit(SizeUnit.B);

    BenchmarkRunner.Run<JsonRpcMiddlewareBenchmarks>(configuration);
}
// Benchmarks from two classes sharing the same custom NuGet package version
// must land in the same runtime-properties group: two versions => two groups,
// each containing all three methods from both classes.
public void CustomNuGetJobsAreGroupedByPackageVersion()
{
    var config = ManualConfig.Create(DefaultConfig.Instance)
        .With(Job.Default.WithNuGet("AutoMapper", "7.0.1"))
        .With(Job.Default.WithNuGet("AutoMapper", "7.0.0-alpha-0001"));

    var benchmarks1 = BenchmarkConverter.TypeToBenchmarks(typeof(Plain1), config);
    var benchmarks2 = BenchmarkConverter.TypeToBenchmarks(typeof(Plain2), config);

    var grouped = benchmarks1.BenchmarksCases
        .Union(benchmarks2.BenchmarksCases)
        .GroupBy(benchmark => benchmark, new BenchmarkPartitioner.BenchmarkRuntimePropertiesComparer())
        .ToArray();

    Assert.Equal(2, grouped.Length); // 7.0.1 + 7.0.0-alpha-0001

    foreach (var grouping in grouped)
    {
        Assert.Equal(3 * 2, grouping.Count()); // (M1 + M2 + M3) * (Plain1 + Plain2)
    }
}
// Entry point: prints the host value obtained by each access strategy as a
// sanity check, then benchmarks both the single- and multi-property variants
// with the optimizations validator disabled.
static void Main(string[] args)
{
    var email = new EmailSmtpClientMSOneProperty();
    email.Setup();
    Console.WriteLine(email.GetHostReflection());
    Console.WriteLine(email.GetHostViaDictionary());
    Console.WriteLine(email.GetHostViaSwitch());

    var config = ManualConfig
        .Create(DefaultConfig.Instance)
        .WithOption(ConfigOptions.DisableOptimizationsValidator, true);

    BenchmarkRunner.Run<EmailSmtpClientMSOneProperty>(config);
    BenchmarkRunner.Run<EmailSmtpClientMSMultipleProperties>(
        ManualConfig
            .Create(DefaultConfig.Instance)
            .WithOption(ConfigOptions.DisableOptimizationsValidator, true));
}
// Entry point: benchmarks the bit-count and recording types under three
// runtime/JIT combinations (CLR legacy JIT, CLR RyuJIT, Core RyuJIT) with a
// memory diagnoser attached.
static void Main(string[] args)
{
    var manualConfig = ManualConfig.Create(DefaultConfig.Instance);
    manualConfig.Add(new MemoryDiagnoser());
    //manualConfig.Add(new BenchmarkDotNet.Diagnostics.Windows.InliningDiagnoser());
    //manualConfig.Add(HardwareCounter.BranchMispredictions, HardwareCounter.BranchInstructions);

    var config = manualConfig
        .With(Job.Clr.With(Jit.LegacyJit))
        .With(Job.Clr.With(Jit.RyuJit))
        .With(Job.Core.With(Jit.RyuJit));

    var benchmarkTypes = new[]
    {
        typeof(LeadingZeroCount.LeadingZeroCount64BitBenchmark),
        typeof(LeadingZeroCount.LeadingZeroCount32BitBenchmark),
        typeof(Recording.Recording32BitBenchmark),
    };

    new BenchmarkSwitcher(benchmarkTypes).Run(args, config);
}
/// <param name="maxDepth">Includes called methods to given level. 1 by default, indexed from 1. To print just the benchmark set it to 0.</param>
/// <param name="printSource">C#|F#|VB source code will be printed. False by default.</param>
/// <param name="printInstructionAddresses">Print instruction addresses. False by default</param>
/// <param name="exportGithubMarkdown">Exports to GitHub markdown. True by default.</param>
/// <param name="exportHtml">Exports to HTML with clickable links. False by default.</param>
/// <param name="exportCombinedDisassemblyReport">Exports all benchmarks to a single HTML report. Makes it easy to compare different runtimes or methods (each becomes a column in HTML table).</param>
/// <param name="exportDiff">Exports a diff of the assembly code to the Github markdown format. False by default.</param>
public DisassemblyDiagnoserAttribute(
    int maxDepth = 1,
    bool printSource = false,
    bool printInstructionAddresses = false,
    bool exportGithubMarkdown = true,
    bool exportHtml = false,
    bool exportCombinedDisassemblyReport = false,
    bool exportDiff = false)
{
    // Forward every attribute argument straight into the diagnoser config.
    var diagnoserConfig = new DisassemblyDiagnoserConfig(
        maxDepth: maxDepth,
        printSource: printSource,
        printInstructionAddresses: printInstructionAddresses,
        exportGithubMarkdown: exportGithubMarkdown,
        exportHtml: exportHtml,
        exportCombinedDisassemblyReport: exportCombinedDisassemblyReport,
        exportDiff: exportDiff);

    Config = ManualConfig.CreateEmpty().AddDiagnoser(new DisassemblyDiagnoser(diagnoserConfig));
}