// Verifies that explicitly requested statistic columns all show up in the summary table header.
public void ExtraColumnsCanBeDefined()
{
    var logger = new OutputLogger(Output);
    // P95 was listed twice in the original array; a duplicate adds no coverage, so each column appears once.
    var columns = new[]
    {
        StatisticColumn.StdDev,
        StatisticColumn.Min,
        StatisticColumn.Q1,
        StatisticColumn.Median,
        StatisticColumn.Q3,
        StatisticColumn.Max,
        StatisticColumn.OperationsPerSecond,
        StatisticColumn.P85,
        StatisticColumn.P95
    };
    var config = DefaultConfig.Instance
        .With(Job.Dry.WithTargetCount(10).WithIterationTime(10))
        .With(logger)
        .With(columns);

    var summary = CanExecute<Target>(config);

    var headerRow = summary.Table.FullHeader;
    foreach (var column in columns)
    {
        // Assert.Contains reports the missing item and the collection on failure,
        // unlike Assert.True(headerRow.Contains(...)) which only reports "False".
        Assert.Contains(column.ColumnName, headerRow);
    }
}
// Builds a minimal in-process config: a dry job on the emit toolchain plus the default columns.
private IConfig CreateInProcessConfig(OutputLogger logger)
{
    // Fall back to the test-output logger, then to the console, when no logger was supplied.
    var effectiveLogger = logger ?? (Output != null ? new OutputLogger(Output) : ConsoleLogger.Default);
    var job = Job.Dry
        .With(new InProcessEmitToolchain(TimeSpan.Zero, true))
        .WithInvocationCount(UnrollFactor)
        .WithUnrollFactor(UnrollFactor);
    return new ManualConfig()
        .With(job)
        .With(effectiveLogger)
        .With(DefaultColumnProviders.Instance);
}
// Verifies that extra statistic columns attached to the config reach the summary table header.
public void ExtraColumnsCanBeDefined()
{
    var logger = new OutputLogger(Output);
    // P95 was listed twice in the original array; the duplicate added no coverage.
    var columns = new[]
    {
        StatisticColumn.StdDev, StatisticColumn.Min, StatisticColumn.Q1,
        StatisticColumn.Median, StatisticColumn.Q3, StatisticColumn.Max,
        StatisticColumn.OperationsPerSecond, StatisticColumn.P85, StatisticColumn.P95
    };
    var config = DefaultConfig.Instance
        .With(Job.Dry.WithTargetCount(10).WithIterationTime(10))
        .With(logger)
        .With(columns);

    var summary = CanExecute<Target>(config);

    var headerRow = summary.Table.FullHeader;
    foreach (var column in columns)
        Assert.Contains(column.ColumnName, headerRow); // better failure message than Assert.True(Contains)
}
// Two jobs (ColdStart / Throughput) over two benchmark methods must yield exactly one run per combination.
public void RunStrategiesAreSupported()
{
    var logger = new OutputLogger(Output);
    var config = ManualConfig.CreateEmpty()
        .With(DefaultColumnProviders.Instance)
        .With(logger)
        .With(new Job(Job.Dry) { Run = { RunStrategy = RunStrategy.ColdStart } })
        .With(new Job(Job.Dry) { Run = { RunStrategy = RunStrategy.Throughput } });

    var summary = CanExecute<ModeBenchmarks>(config);

    Assert.Equal(4, summary.Benchmarks.Count());
    foreach (var strategy in new[] { RunStrategy.ColdStart, RunStrategy.Throughput })
    foreach (var methodName in new[] { "BenchmarkWithVoid", "BenchmarkWithReturnValue" })
        Assert.Equal(1, summary.Benchmarks.Count(b => b.Job.Run.RunStrategy == strategy && b.Target.Method.Name == methodName));

    var testLog = logger.GetLog();
    Assert.Contains("// ### Benchmark with void called ###", testLog);
    Assert.Contains("// ### Benchmark with return value called ###", testLog);
    Assert.DoesNotContain("No benchmarks found", logger.GetLog());
}
// Verifies that columns added to an otherwise empty config all appear in the table header.
public void ExtraColumnsCanBeDefined()
{
    var logger = new OutputLogger(Output);
    // P95 was listed twice in the original array; each requested column now appears exactly once.
    var columns = new[]
    {
        StatisticColumn.StdDev, StatisticColumn.Min, StatisticColumn.Q1,
        StatisticColumn.Median, StatisticColumn.Q3, StatisticColumn.Max,
        StatisticColumn.OperationsPerSecond, StatisticColumn.P85, StatisticColumn.P95
    };
    var config = ManualConfig.CreateEmpty().With(CreateJob()).With(logger).With(columns);

    var summary = CanExecute<Target>(config);

    var headerRow = summary.Table.FullHeader;
    foreach (var column in columns)
    {
        Assert.Contains(column.ColumnName, headerRow);
    }
}
// Runs an allocating benchmark in-process with the memory diagnoser and checks allocations are reported.
public void InProcessBenchmarkAllCasesDiagnoserTest()
{
    var logger = new OutputLogger(Output);
    var config = new ManualConfig()
        .With(Job.Default.With(InProcessToolchain.Instance))
        .With(MemoryDiagnoser.Default)
        .With(logger)
        .With(DefaultColumnProviders.Instance);

    try
    {
        BenchmarkAllCases.Counter = 0;

        var summary = CanExecute<BenchmarkAllocates>(config);

        var log = logger.GetLog();
        Assert.Contains("// Benchmark: BenchmarkAllocates.Allocates:", log);
        Assert.DoesNotContain("No benchmarks found", logger.GetLog());
        // The benchmark allocates, so the diagnoser must report a positive byte count.
        Assert.True(summary.Reports.Sum(r => r.GcStats.AllocatedBytes) > 0);
    }
    finally
    {
        // Reset shared state so other tests start from a clean counter.
        BenchmarkAllCases.Counter = 0;
    }
}
// Verifies column priorities and that the table orders columns by ascending priority.
public void ParamsSupportPropertyWithPublicSetter()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    var summary = CanExecute<PriorityBenchmark>(config);

    // First() throws if the column is absent, so separate NotNull asserts are redundant.
    var columns = summary.Table.Columns;
    var aColumn = columns.First(col => col.Header == "A");
    var bColumn = columns.First(col => col.Header == "b");
    var cColumn = columns.First(col => col.Header == "c");
    var dColumn = columns.First(col => col.Header == "d");
    var eColumn = columns.First(col => col.Header == "E");
    var fColumn = columns.First(col => col.Header == "F");

    // Assert.Equal reports expected vs. actual on failure, unlike Assert.True(x == y).
    Assert.Equal(-100, aColumn.OriginalColumn.PriorityInCategory);
    Assert.Equal(-10, bColumn.OriginalColumn.PriorityInCategory);
    Assert.Equal(0, cColumn.OriginalColumn.PriorityInCategory);
    Assert.Equal(0, dColumn.OriginalColumn.PriorityInCategory);
    Assert.Equal(10, eColumn.OriginalColumn.PriorityInCategory);
    Assert.Equal(50, fColumn.OriginalColumn.PriorityInCategory);

    // Columns must appear in ascending priority order.
    Assert.True(aColumn.Index < bColumn.Index);
    Assert.True(bColumn.Index < cColumn.Index);
    Assert.True(cColumn.Index < dColumn.Index);
    Assert.True(dColumn.Index < eColumn.Index);
    Assert.True(eColumn.Index < fColumn.Index);
}
// Builds an in-process config for the given codegen mode.
// NOTE(review): the diagnoser parameter is accepted but not used anywhere in this method — confirm intent.
private IConfig CreateInProcessConfig(BenchmarkActionCodegen codegenMode, OutputLogger logger = null, IDiagnoser diagnoser = null)
{
    var effectiveLogger = logger ?? (Output != null ? new OutputLogger(Output) : ConsoleLogger.Default);
    var job = Job.Dry
        .WithToolchain(new InProcessToolchain(TimeSpan.Zero, codegenMode, true))
        .WithInvocationCount(UnrollFactor)
        .WithUnrollFactor(UnrollFactor);
    return new ManualConfig()
        .AddJob(job)
        .AddLogger(effectiveLogger)
        .AddColumnProvider(DefaultColumnProviders.Instance);
}
// End-to-end performance sanity test: the slow benchmark must be measurably slower than the fast one,
// and both must fall inside their known timing windows.
public void Test()
{
    var logger = new OutputLogger(output);
    var config = DefaultConfig.Instance.With(logger);
    var summary = BenchmarkRunner.Run<PerformanceUnitTest>(config);

    // Sanity checks, to be sure that the different benchmarks actually run
    var testOutput = logger.GetLog();
    Assert.Contains("// ### Slow Benchmark called ###" + Environment.NewLine, testOutput);
    Assert.Contains("// ### Fast Benchmark called ###" + Environment.NewLine, testOutput);

    // Check that slow benchmark is actually slower than the fast benchmark!
    var slowBenchmarkRun = summary.GetRunsFor<PerformanceUnitTest>(r => r.SlowBenchmark()).First();
    var fastBenchmarkRun = summary.GetRunsFor<PerformanceUnitTest>(r => r.FastBenchmark()).First();
    // String interpolation replaces string.Format (same culture-sensitive formatting, clearer at the call site).
    Assert.True(slowBenchmarkRun.GetAverageNanoseconds() > fastBenchmarkRun.GetAverageNanoseconds(),
        $"Expected SlowBenchmark: {slowBenchmarkRun.GetAverageNanoseconds():N2} ns to be MORE than FastBenchmark: {fastBenchmarkRun.GetAverageNanoseconds():N2} ns");
    Assert.True(slowBenchmarkRun.GetOpsPerSecond() < fastBenchmarkRun.GetOpsPerSecond(),
        $"Expected SlowBenchmark: {slowBenchmarkRun.GetOpsPerSecond():N2} Ops to be LESS than FastBenchmark: {fastBenchmarkRun.GetOpsPerSecond():N2} Ops");

    // Whilst we're at it, let's do more specific Asserts as we know what the elapsed time should be
    var slowBenchmarkReport = summary.GetReportFor<PerformanceUnitTest>(r => r.SlowBenchmark());
    var fastBenchmarkReport = summary.GetReportFor<PerformanceUnitTest>(r => r.FastBenchmark());
    foreach (var slowRun in slowBenchmarkReport.GetResultRuns())
        Assert.InRange(slowRun.GetAverageNanoseconds() / 1000.0 / 1000.0, low: 98, high: 102);
    foreach (var fastRun in fastBenchmarkReport.GetResultRuns())
        Assert.InRange(fastRun.GetAverageNanoseconds() / 1000.0 / 1000.0, low: 14, high: 17);
}
// Two jobs (SingleRun / Throughput) over two benchmark methods -> four runs, one per combination.
public void ModesAreSupported()
{
    var logger = new OutputLogger(Output);
    var config = ManualConfig.CreateEmpty()
        .With(DefaultConfig.Instance.GetColumns().ToArray())
        .With(logger)
        .With(Job.Dry.With(Mode.SingleRun))
        .With(Job.Dry.With(Mode.Throughput));

    var summary = CanExecute<ModeBenchmarks>(config);

    Assert.Equal(4, summary.Benchmarks.Count());
    foreach (var mode in new[] { Mode.SingleRun, Mode.Throughput })
    foreach (var methodName in new[] { "BenchmarkWithVoid", "BenchmarkWithReturnValue" })
        Assert.Equal(1, summary.Benchmarks.Count(b => b.Job.Mode == mode && b.Target.Method.Name == methodName));

    var testLog = logger.GetLog();
    Assert.Contains("// ### Benchmark with void called ###", testLog);
    Assert.Contains("// ### Benchmark with return value called ###", testLog);
    Assert.DoesNotContain("No benchmarks found", logger.GetLog());
}
// Runs a benchmark under a non-default culture and verifies the ambient cultures are always restored.
public void CanExecuteWithNonDefaultUiCulture(string culture)
{
    var savedCulture = CultureInfo.CurrentCulture;
    var savedUiCulture = CultureInfo.CurrentUICulture;
    try
    {
        var overrideCulture = CultureInfo.GetCultureInfo(culture);
        Assert.NotNull(overrideCulture);
        // The test requires a concrete (non-neutral) culture.
        Assert.False(overrideCulture.IsNeutralCulture);

        CultureInfo.CurrentCulture = overrideCulture;
        CultureInfo.CurrentUICulture = overrideCulture;

        var logger = new OutputLogger(Output);
        var miniJob = Job.Dry.With(RoslynToolchain.Instance);
        var config = CreateSimpleConfig(logger, miniJob);
        CanExecute<SimpleBenchmarks>(config);
    }
    finally
    {
        // Restore the ambient cultures even if the run throws.
        CultureInfo.CurrentCulture = savedCulture;
        CultureInfo.CurrentUICulture = savedUiCulture;
    }
}
// Builds a minimal test config: a single-job (or fast single-run) base plus a logger and default columns.
protected IConfig CreateSimpleConfig(OutputLogger logger = null, Job job = null)
{
    IConfig baseConfig = job == null ? (IConfig)new SingleRunFastConfig() : new SingleJobConfig(job);
    // Prefer the supplied logger, then the test-output logger, then the console.
    var effectiveLogger = logger ?? (Output != null ? new OutputLogger(Output) : ConsoleLogger.Default);
    return baseConfig
        .With(effectiveLogger)
        .With(DefaultColumnProviders.Instance);
}
// Each benchmark's log marker must precede its matching target-specific global-cleanup marker.
public void GlobalCleanupTargetSpecificMethodTest()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<GlobalCleanupAttributeTargetBenchmarks>(config);

    var log = logger.GetLog();

    var baselineBenchmark = BaselineBenchmarkCalled + Environment.NewLine;
    var baselineCleanup = BaselineGlobalCleanupCalled + Environment.NewLine;
    Assert.Contains(baselineBenchmark, log);
    Assert.True(log.IndexOf(baselineBenchmark) < log.IndexOf(baselineCleanup));

    var firstCleanup = FirstGlobalCleanupCalled + Environment.NewLine;
    var firstBenchmark = FirstBenchmarkCalled + Environment.NewLine;
    Assert.Contains(firstCleanup, log);
    Assert.Contains(firstBenchmark, log);
    Assert.True(log.IndexOf(firstBenchmark) < log.IndexOf(firstCleanup));

    var secondCleanup = SecondGlobalCleanupCalled + Environment.NewLine;
    var secondBenchmark = SecondBenchmarkCalled + Environment.NewLine;
    Assert.Contains(secondCleanup, log);
    Assert.Contains(secondBenchmark, log);
    Assert.True(log.IndexOf(secondBenchmark) < log.IndexOf(secondCleanup));
}
// Verifies setup/cleanup markers for both prefixes against the expected log-line sequences.
public void AllSetupAndCleanupMethodRunsForSpecificBenchmark()
{
    var logger = new OutputLogger(Output);
    var miniJob = Job.Default.With(RunStrategy.Monitoring).WithWarmupCount(2).WithIterationCount(3)
        .WithInvocationCount(1).WithUnrollFactor(1).WithId("MiniJob");
    var config = CreateSimpleConfig(logger, miniJob);

    CanExecute<Benchmarks>(config);

    Output.WriteLine(OutputDelimiter);
    Output.WriteLine(OutputDelimiter);
    Output.WriteLine(OutputDelimiter);

    // Collects every log line carrying the given marker prefix, echoing each to the test output.
    string[] CollectLines(string prefix)
    {
        var lines = logger.GetLog().Split('\r', '\n').Where(line => line.StartsWith(prefix)).ToArray();
        foreach (var line in lines)
            Output.WriteLine(line);
        return lines;
    }

    Assert.Equal(firstExpectedLogLines, CollectLines(FirstPrefix));
    Assert.Equal(secondExpectedLogLines, CollectLines(SecondPrefix));
}
// Runs TestBenchmark under the given runtime/jit/platform and expects the given text in the log.
private void Verify(Runtime runtime, Jit jit, Platform platform, string expectedText)
{
    var logger = new OutputLogger(Output);
    var config = new PlatformConfig(runtime, jit, platform)
        .With(logger)
        .With(DefaultColumnProviders.Instance);

    var benchmarks = new[] { BenchmarkConverter.TypeToBenchmarks(typeof(TestBenchmark), config) };
    BenchmarkRunner.Run(benchmarks);

    Assert.Contains(expectedText, logger.GetLog());
}
// A throwing benchmark's exception message must still surface in the log.
public void ExceptionMessageIsNotLost()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    // Full validation is skipped: the benchmark is expected to throw.
    CanExecute<ThrowingException>(config, fullValidation: false);

    Assert.Contains(ErrorMessage, logger.GetLog());
}
// Builds a minimal test config (Add* fluent API): base job config + logger + columns + default analysers.
protected IConfig CreateSimpleConfig(OutputLogger logger = null, Job job = null)
{
    var effectiveLogger = logger ?? (Output != null ? new OutputLogger(Output) : ConsoleLogger.Default);
    IConfig baseConfig = job == null ? (IConfig)new SingleRunFastConfig() : new SingleJobConfig(job);
    return baseConfig
        .AddLogger(effectiveLogger)
        .AddColumnProvider(DefaultColumnProviders.Instance)
        .AddAnalyser(DefaultConfig.Instance.GetAnalysers().ToArray());
}
// Params 1 and 2 must each be logged; the property's default value must never appear.
public void ParamsSupportPropertyWithPublicSetter()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<ParamsTestProperty>(config);

    var log = logger.GetLog();
    foreach (var param in new[] { 1, 2 })
    {
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, log);
    }
    Assert.DoesNotContain($"// ### New Parameter {default(int)} ###" + Environment.NewLine, logger.GetLog());
}
// Static-field params test: values 1 and 2 must be logged; the field default must not be.
public void Test()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<ParamsTestStaticField>(config);

    foreach (var param in new[] { 1, 2 })
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, logger.GetLog());
    // The original $-string had no interpolation hole; interpolate default(int) like the sibling
    // params tests do. The produced text is unchanged ("0").
    Assert.DoesNotContain($"// ### New Parameter {default(int)} ###" + Environment.NewLine, logger.GetLog());
}
// Enum params A and B must each be logged; C is not in the params set and must never appear.
public void ParamsSupportFSharpEnums()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<EnumParamsTest>(config);

    var log = logger.GetLog();
    foreach (var param in new[] { TestEnum.A, TestEnum.B })
    {
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, log);
    }
    Assert.DoesNotContain($"// ### New Parameter {TestEnum.C} ###" + Environment.NewLine, logger.GetLog());
}
// Applying the "user" power plan must leave the machine's current plan untouched.
public void TestPowerPlanShouldNotChange()
{
    var userPlan = PowerManagementHelper.CurrentPlan;
    var logger = new OutputLogger(Output);
    var powerManagementApplier = new PowerManagementApplier(logger);
    // (An unused local built from DefaultConfig.Instance.With(logger) was removed — it was never passed anywhere.)

    powerManagementApplier.ApplyPerformancePlan(PowerPlan.UserPowerPlan);
    Assert.Equal(userPlan.ToString(), PowerManagementHelper.CurrentPlan.ToString());

    powerManagementApplier.ApplyUserPowerPlan();
    Assert.Equal(userPlan, PowerManagementHelper.CurrentPlan);
}
// Runs the benchmark on the given platform and asserts neither the failure text nor
// the "benchmark not found" marker appears in the log.
private void Verify(Platform platform, Type benchmark, string failureText)
{
    // The logger mirrors benchmark output into the TestRunner log.
    var logger = new OutputLogger(Output);
    var config = new PlatformConfig(platform)
        .With(logger)
        .With(DefaultColumnProviders.Instance);

    CanExecute(benchmark, config);

    var log = logger.GetLog();
    Assert.DoesNotContain(failureText, log);
    Assert.DoesNotContain(BenchmarkNotFound, log);
}
// A cold-start run executes the benchmark exactly once.
public void ColdStart()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<ColdStartBench>(config);

    var log = logger.GetLog();
    // Counter 1 is logged by the single invocation; counter 2 would imply a second one.
    Assert.Contains($"{CounterPrefix}1", log);
    Assert.DoesNotContain($"{CounterPrefix}2", log);
}
// Property params 1 and 2 must both run; the property's default value must never be used.
public void ParamsSupportPropertyWithPublicSetter()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<ParamsTestProperty>(config);

    for (var param = 1; param <= 2; param++)
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, logger.GetLog());
    Assert.DoesNotContain($"// ### New Parameter {default(int)} ###" + Environment.NewLine, logger.GetLog());
}
// Switches to the High Performance plan, verifies guid and friendly name, then reverts to the user's plan.
public void TestSettingAndRevertingBackGuid()
{
    var userPlan = PowerManagementHelper.CurrentPlan;
    var logger = new OutputLogger(Output);
    var powerManagementApplier = new PowerManagementApplier(logger);
    // (An unused local built from DefaultConfig.Instance.With(logger) was removed — it was never passed anywhere.)

    powerManagementApplier.ApplyPerformancePlan(PowerPlan.HighPerformance);
    Assert.Equal(HighPerformancePlanGuid, PowerManagementHelper.CurrentPlan.ToString());
    Assert.Equal("High performance", PowerManagementHelper.CurrentPlanFriendlyName);

    powerManagementApplier.ApplyUserPowerPlan();
    Assert.Equal(userPlan, PowerManagementHelper.CurrentPlan);
}
// Static-field params test (braced-loop variant): 1 and 2 must be logged, the default must not.
public void Test()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<ParamsTestStaticField>(config);

    foreach (var param in new[] { 1, 2 })
    {
        Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, logger.GetLog());
    }
    // The original $-string had no interpolation hole; interpolate default(int) like the sibling
    // params tests. The produced text is unchanged ("0").
    Assert.DoesNotContain($"// ### New Parameter {default(int)} ###" + Environment.NewLine, logger.GetLog());
}
// Applying the mapped "user" plan must be a no-op, and Dispose must leave the user's plan in place.
public void TestPowerPlanShouldNotChange()
{
    var userPlan = PowerManagementHelper.CurrentPlan;
    var logger = new OutputLogger(Output);
    var applier = new PowerManagementApplier(logger);

    applier.ApplyPerformancePlan(PowerManagementApplier.Map(PowerPlan.UserPowerPlan));
    Assert.Equal(userPlan.ToString(), PowerManagementHelper.CurrentPlan.ToString());

    applier.Dispose();
    Assert.Equal(userPlan, PowerManagementHelper.CurrentPlan);
}
// The cleanup marker must appear in the log, and only after the benchmark marker.
public void CleanupMethodRunsTest()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<CleanupAttributeBenchmarks>(config);

    var log = logger.GetLog();
    var cleanupMarker = CleanupCalled + Environment.NewLine;
    Assert.Contains(cleanupMarker, log);
    Assert.True(log.IndexOf(cleanupMarker) > log.IndexOf(BenchmarkCalled + Environment.NewLine));
}
// Nested (including generic nested) benchmark classes must run and log their markers.
public void InnerClassesAreSupported()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<Inner>(config);

    var log = logger.GetLog();
    Assert.Contains("// ### BenchmarkInnerClass method called ###" + Environment.NewLine, log);
    Assert.Contains("// ### BenchmarkGenericInnerClass method called ###" + Environment.NewLine, log);
    Assert.DoesNotContain("No benchmarks found", logger.GetLog());
}
// Runs the benchmark on the given platform with the default columns and asserts
// no failure text and no "benchmark not found" marker in the log.
private void Verify(Platform platform, Type benchmark, string failureText)
{
    // Route benchmark output into the TestRunner log.
    var logger = new OutputLogger(Output);
    var defaultColumns = DefaultConfig.Instance.GetColumns().ToArray();
    var config = new PlatformConfig(platform).With(logger).With(defaultColumns);

    CanExecute(benchmark, config);

    var log = logger.GetLog();
    Assert.DoesNotContain(failureText, log);
    Assert.DoesNotContain(BenchmarkNotFound, log);
}
// The global cleanup marker must be logged, and only after the benchmark marker.
public void GlobalCleanupMethodRunsTest()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<GlobalCleanupAttributeBenchmarks>(config);

    var log = logger.GetLog();
    var cleanupMarker = GlobalCleanupCalled + System.Environment.NewLine;
    var benchmarkMarker = BenchmarkCalled + System.Environment.NewLine;
    Assert.Contains(cleanupMarker, log);
    Assert.True(log.IndexOf(cleanupMarker) > log.IndexOf(benchmarkMarker));
}
// Runs the benchmark with both in-process-emit and Roslyn toolchains and diffs the emitted IL.
public void InProcessBenchmarkEmitsSameIL(Type benchmarkType)
{
    var logger = new OutputLogger(Output);
    var config = CreateInProcessAndRoslynConfig(logger);

    var summary = CanExecute(benchmarkType, config);
    DiffEmit(summary);

    var log = logger.GetLog();
    Assert.Contains(benchmarkType.Name, log);
    Assert.DoesNotContain("No benchmarks found", logger.GetLog());
}
// Runs two benchmark types back to back, checking each logs its library/type marker.
public void GenericClassesAreSupported()
{
    var logger = new OutputLogger(Output);
    var config = CreateSimpleConfig(logger);

    CanExecute<FlatClassBenchmark>(config);
    Assert.Contains($"// ### Benchmark: SerializationLibrary1, Type: {typeof(FlatClassBenchmark).Name} ###", logger.GetLog());

    // Start from an empty log so the second run's marker is checked in isolation.
    logger.ClearLog();

    CanExecute<DoubleArrayBenchmark>(config);
    Assert.Contains($"// ### Benchmark: SerializationLibrary2, Type: {typeof(DoubleArrayBenchmark).Name} ###", logger.GetLog());
}
// Verifies the in-process emit toolchain invokes IterationSetup/IterationCleanup exactly once
// and runs the benchmark body the expected number of times.
public void InProcessEmitToolchainSupportsIterationSetupAndCleanup()
{
    var logger = new OutputLogger(Output);
    var config = CreateInProcessConfig(logger);

    // Reset shared counters so earlier runs cannot leak into the assertions.
    WithIterationSetupAndCleanup.SetupCounter = 0;
    WithIterationSetupAndCleanup.BenchmarkCounter = 0;
    WithIterationSetupAndCleanup.CleanupCounter = 0;

    // The returned summary was unused, so it is no longer captured.
    CanExecute<WithIterationSetupAndCleanup>(config);

    Assert.Equal(1, WithIterationSetupAndCleanup.SetupCounter);
    Assert.Equal(16, WithIterationSetupAndCleanup.BenchmarkCounter);
    Assert.Equal(1, WithIterationSetupAndCleanup.CleanupCounter);
}
// Runs the benchmark on a dry job for the given platform and asserts no failure text appears.
private void Verify(Platform platform, Type benchmark, string failureText)
{
    // Capture run output so the TestRunner log can be inspected.
    var logger = new OutputLogger(Output);
    var config = ManualConfig.CreateEmpty()
        .With(Job.Dry.With(platform))
        .With(logger);

    CanExecute(benchmark, config);

    var log = logger.GetLog();
    Assert.DoesNotContain(failureText, log);
    Assert.DoesNotContain(BenchmarkNotFound, log);
}
// A fully custom toolchain must have all three stages (generate, build, execute) invoked by the runner.
public void CustomToolchain()
{
    var logger = new OutputLogger(output);

    // Stub toolchain stages that simply record whether they were invoked.
    var generator = new MyGenerator();
    var builder = new MyBuilder();
    var executor = new MyExecutor();
    var myToolchain = new Toolchain("My", generator, builder, executor);
    var job = Job.Default.With(myToolchain).With(Mode.SingleRun)
        .WithLaunchCount(1).WithWarmupCount(1).WithTargetCount(1);
    var config = DefaultConfig.Instance.With(job).With(logger);

    BenchmarkRunner.Run<ToolchainTest>(config);

    Assert.True(generator.Done);
    Assert.True(builder.Done);
    Assert.True(executor.Done);
}
// Async setup/cleanup benchmarks must produce exactly the expected marker lines, in order.
public void AllSetupAndCleanupMethodRunsAsyncTest()
{
    var logger = new OutputLogger(Output);
    var miniJob = Job.Default.WithStrategy(RunStrategy.Monitoring)
        .WithWarmupCount(2).WithIterationCount(3)
        .WithInvocationCount(1).WithUnrollFactor(1)
        .WithId("MiniJob");
    var config = CreateSimpleConfig(logger, miniJob);

    CanExecute<AllSetupAndCleanupAttributeBenchmarksAsync>(config);

    var actualLogLines = logger.GetLog().Split('\r', '\n').Where(line => line.StartsWith(Prefix)).ToArray();
    foreach (var line in actualLogLines)
        Output.WriteLine(line);

    Assert.Equal(expectedLogLines, actualLogLines);
}
// A custom toolchain attached to a dry job must have all three stages invoked.
public void CustomToolchainsAreSupported()
{
    var logger = new OutputLogger(Output);

    // Stub toolchain stages that record whether they ran.
    var generator = new MyGenerator();
    var builder = new MyBuilder();
    var executor = new MyExecutor();
    var myToolchain = new Toolchain("My", generator, builder, executor);
    var config = CreateSimpleConfig(logger).With(Job.Dry.With(myToolchain));

    // Full validation is skipped: the stub toolchain produces no real results.
    CanExecute<ToolchainBenchmark>(config, fullValidation: false);

    Assert.True(generator.Done);
    Assert.True(builder.Done);
    Assert.True(executor.Done);
}
// Two jobs (SingleRun / Throughput) over two benchmark methods -> exactly one run per combination.
public void ModesAreSupported()
{
    var logger = new OutputLogger(Output);
    var config = ManualConfig.CreateEmpty()
        .With(DefaultConfig.Instance.GetColumns().ToArray())
        .With(logger)
        .With(Job.Dry.With(Mode.SingleRun))
        .With(Job.Dry.With(Mode.Throughput));

    var summary = CanExecute<ModeBenchmarks>(config);

    // Counts the runs matching one job mode and one benchmark method.
    int CountRuns(Mode mode, string methodName) =>
        summary.Benchmarks.Count(b => b.Job.Mode == mode && b.Target.Method.Name == methodName);

    Assert.Equal(4, summary.Benchmarks.Count());
    Assert.Equal(1, CountRuns(Mode.SingleRun, "BenchmarkWithVoid"));
    Assert.Equal(1, CountRuns(Mode.SingleRun, "BenchmarkWithReturnValue"));
    Assert.Equal(1, CountRuns(Mode.Throughput, "BenchmarkWithVoid"));
    Assert.Equal(1, CountRuns(Mode.Throughput, "BenchmarkWithReturnValue"));

    var log = logger.GetLog();
    Assert.Contains("// ### Benchmark with void called ###", log);
    Assert.Contains("// ### Benchmark with return value called ###", log);
    Assert.DoesNotContain("No benchmarks found", logger.GetLog());
}