// Builds every benchmark case discovered on MockBenchmarkClass under the given config.
private static BenchmarkCase[] CreateBenchmarks(IConfig config)
    => BenchmarkConverter.TypeToBenchmarks(typeof(MockBenchmarkClass), config).BenchmarksCases;
// A class with multiple [Params] values and a single global setup must
// produce no execution-validation errors.
public void MultipleParamsDoNotMultiplyGlobalSetup()
{
    var benchmarks = BenchmarkConverter.TypeToBenchmarks(typeof(MultipleParamsAndSingleGlobalSetup));

    var errors = ExecutionValidator.FailOnError.Validate(benchmarks);

    Assert.Empty(errors);
}
// Converts MockBenchmarkClass into benchmark cases and produces one report per case.
private static BenchmarkReport[] CreateReports(IConfig config)
{
    var runInfo = BenchmarkConverter.TypeToBenchmarks(typeof(MockBenchmarkClass), config);

    return runInfo.BenchmarksCases
        .Select(CreateReport)
        .ToArray();
}
// Fields whose [Params] carry no values must yield zero benchmark cases.
public void FieldsWithoutParamsValuesAreDiscovered()
{
    var cases = BenchmarkConverter.TypeToBenchmarks(typeof(FieldsWithoutParamsValues)).BenchmarksCases;

    Assert.Empty(cases);
}
// A benchmark that executes without throwing must produce no validation errors.
public void NonFailingBenchmarksAreOmitted()
{
    var benchmarks = BenchmarkConverter.TypeToBenchmarks(typeof(NonFailingBenchmark));

    var errors = ExecutionValidator.FailOnError.Validate(benchmarks);

    Assert.Empty(errors);
}
// A global cleanup that depends on params being applied first must still
// validate without errors.
public void NonFailingGlobalCleanupsAreOmitted()
{
    var benchmarks = BenchmarkConverter.TypeToBenchmarks(typeof(GlobalCleanupThatRequiresParamsToBeSetFirst));

    var errors = ExecutionValidator.FailOnError.Validate(benchmarks);

    Assert.Empty(errors);
}
// A [Params] property lacking a public setter must surface as an
// InvalidOperationException during conversion/validation.
public void PropertyWithoutPublicSetterParamsAreDiscovered()
{
    // Both conversion and validation stay inside the lambda so the assertion
    // captures the exception regardless of which step throws.
    Assert.Throws<InvalidOperationException>(
        () => ExecutionValidator.FailOnError.Validate(
            BenchmarkConverter.TypeToBenchmarks(typeof(PropertyWithoutPublicSetterParams))));
}
// Asserts that the benchmark type TBenchmark passes return-value validation
// with no errors.
private static void AssertConsistent<TBenchmark>()
{
    var errors = ReturnValueValidator.FailOnError
        .Validate(BenchmarkConverter.TypeToBenchmarks(typeof(TBenchmark)))
        .ToList();

    Assert.Empty(errors);
}
// Compilation validation must flag (or not flag) the given type exactly as
// the hasErrors parameter predicts.
public void Benchmark_Class_Methods_Must_Be_Non_Static(Type type, bool hasErrors)
{
    var errors = CompilationValidator.FailOnError.Validate(BenchmarkConverter.TypeToBenchmarks(type));

    Assert.Equal(hasErrors, errors.Any());
}
// Converts the given benchmark container type into its list of benchmarks.
private IList<Benchmark> CreateBenchmarks(Type targetBenchmarkType)
    => BenchmarkConverter.TypeToBenchmarks(targetBenchmarkType);
// Picks the first benchmark discovered on MockBenchmarkClass under the given config.
private static Benchmark CreateBenchmark(IConfig config)
    => BenchmarkConverter.TypeToBenchmarks(typeof(MockBenchmarkClass), config).First();
/// <summary>
/// Entry point for the profiling harness: resolves a benchmark class by name
/// (args[0]) and optionally a case index (args[1]), then runs its workload on a
/// dedicated thread so a profiler can isolate it.
/// </summary>
static void Main(string[] args)
{
    // Pick a benchmark.
    var availableBenchmarks = Benchmarks.Benchmarks.All;

    if (args.Length == 0)
    {
        Console.WriteLine("Must provide the name of a benchmark class. (e.g. ./Autofac.BenchmarkProfiling.exe ChildScopeResolveBenchmark)");
        Console.WriteLine("Possible benchmarks are:");
        PrintBenchmarks(availableBenchmarks);
        return;
    }

    var inputType = args[0];
    var selectedBenchmark = availableBenchmarks.FirstOrDefault(
        x => x.Name.Equals(inputType, StringComparison.InvariantCultureIgnoreCase));

    if (selectedBenchmark is null)
    {
        Console.WriteLine("Specified benchmark does not exist.");
        PrintBenchmarks(availableBenchmarks);
        return;
    }

    var benchRunInfo = BenchmarkConverter.TypeToBenchmarks(selectedBenchmark);

    BenchmarkCase selectedCase = null;

    if (benchRunInfo.BenchmarksCases.Length == 0)
    {
        Console.WriteLine("No benchmark cases in specified benchmark.");
        return;
    }
    else if (benchRunInfo.BenchmarksCases.Length == 1)
    {
        selectedCase = benchRunInfo.BenchmarksCases[0];
    }
    else
    {
        // Multiple benchmark cases. Has one been supplied?
        if (args.Length > 1)
        {
            if (uint.TryParse(args[1], out var selection))
            {
                if (selection < benchRunInfo.BenchmarksCases.Length)
                {
                    selectedCase = benchRunInfo.BenchmarksCases[selection];
                }
                else
                {
                    Console.WriteLine("Invalid benchmark case number provided. Possible options are: ");
                    PrintCases(benchRunInfo);

                    // BUGFIX: without this return, execution fell through with
                    // selectedCase still null and crashed on selectedCase.Descriptor.
                    return;
                }
            }
            else
            {
                Console.WriteLine("Cannot parse provided benchmark case selection.");
                return;
            }
        }
        else
        {
            // BUGFIX: this message was previously one string literal split across a
            // raw newline (invalid C#); emit the two lines explicitly instead.
            Console.WriteLine("Specified benchmark has multiple possible cases; a single case must be specified.");
            Console.WriteLine("Possible options are:");
            PrintCases(benchRunInfo);
            return;
        }
    }

    var benchInstance = Activator.CreateInstance(selectedCase.Descriptor.Type);
    var setupAction = BenchmarkActionFactory.CreateGlobalSetup(selectedCase.Descriptor, benchInstance);
    var cleanupAction = BenchmarkActionFactory.CreateGlobalCleanup(selectedCase.Descriptor, benchInstance);

    // Workload method is generated differently when BenchmarkDotNet actually runs;
    // we'll need to wrap it in the set of parameters. It's way slower than the way
    // they do it, but it should still give us good profiler results.
    Action<int> workloadAction = repeat =>
    {
        while (repeat > 0)
        {
            selectedCase.Descriptor.WorkloadMethod.Invoke(
                benchInstance,
                selectedCase.Parameters.Items.Select(x => x.Value).ToArray());
            repeat--;
        }
    };

    setupAction.InvokeSingle();

    // Warmup.
    workloadAction(100);

    // Now start a new thread so the profiled work appears under a named thread.
    var runThread = new Thread(new ThreadStart(() =>
    {
        // Do a lot.
        workloadAction(10000);
    }))
    {
        Name = "Workload Thread"
    };

    runThread.Start();
    runThread.Join();

    cleanupAction.InvokeSingle();
}
// A benchmark that throws during execution must be reported as at least one
// validation error.
public void FailingBenchmarksAreDiscovered()
{
    var benchmarks = BenchmarkConverter.TypeToBenchmarks(typeof(FailingBenchmark));

    var errors = ExecutionValidator.FailOnError.Validate(benchmarks);

    Assert.NotEmpty(errors);
}
// Expands each benchmark container type into its individual benchmarks and
// flattens the result into a single array.
private Benchmark[] CreateBenchmarks(Type[] types)
{
    return types
        .SelectMany(benchmarkType => BenchmarkConverter.TypeToBenchmarks(benchmarkType))
        .ToArray();
}