Example 1
        /// <summary>Verifies that ColdStart and Throughput run strategies both execute every benchmark.</summary>
        public void RunStrategiesAreSupported()
        {
            var logger = new OutputLogger(Output);
            var config = ManualConfig.CreateEmpty()
                         .With(DefaultColumnProviders.Instance)
                         .With(logger)
                         .With(new Job(Job.Dry)
            {
                Run = { RunStrategy = RunStrategy.ColdStart }
            })
                         .With(new Job(Job.Dry)
            {
                Run = { RunStrategy = RunStrategy.Throughput }
            });

            var results = CanExecute<ModeBenchmarks>(config);

            // Two strategies x two benchmark methods = four benchmark cases.
            Assert.Equal(4, results.Benchmarks.Count());

            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Run.RunStrategy == RunStrategy.ColdStart && b.Target.Method.Name == "BenchmarkWithVoid"));
            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Run.RunStrategy == RunStrategy.ColdStart && b.Target.Method.Name == "BenchmarkWithReturnValue"));

            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Run.RunStrategy == RunStrategy.Throughput && b.Target.Method.Name == "BenchmarkWithVoid"));
            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Run.RunStrategy == RunStrategy.Throughput && b.Target.Method.Name == "BenchmarkWithReturnValue"));

            // Capture the log once instead of calling GetLog() a second time below.
            string testLog = logger.GetLog();

            Assert.Contains("// ### Benchmark with void called ###", testLog);
            Assert.Contains("// ### Benchmark with return value called ###", testLog);
            Assert.DoesNotContain("No benchmarks found", testLog);
        }
Example 2
        /// <summary>Checks that targeted setup/cleanup methods all run; compares the logged lines per prefix against the expected sequences.</summary>
        public void AllSetupAndCleanupMethodRunsForSpecificBenchmark()
        {
            var logger  = new OutputLogger(Output);
            var miniJob = Job.Default.With(RunStrategy.Monitoring).WithWarmupCount(2).WithIterationCount(3).WithInvocationCount(1).WithUnrollFactor(1).WithId("MiniJob");
            var config  = CreateSimpleConfig(logger, miniJob);

            CanExecute<Benchmarks>(config);
            Output.WriteLine(OutputDelimiter);
            Output.WriteLine(OutputDelimiter);
            Output.WriteLine(OutputDelimiter);

            // Fetch and split the captured log once, then filter it per prefix
            // (the original fetched and split it twice).
            var logLines = logger.GetLog().Split('\r', '\n');

            var firstActualLogLines = logLines.Where(line => line.StartsWith(FirstPrefix)).ToArray();
            foreach (string line in firstActualLogLines)
            {
                Output.WriteLine(line);
            }
            Assert.Equal(firstExpectedLogLines, firstActualLogLines);

            var secondActualLogLines = logLines.Where(line => line.StartsWith(SecondPrefix)).ToArray();
            foreach (string line in secondActualLogLines)
            {
                Output.WriteLine(line);
            }
            Assert.Equal(secondExpectedLogLines, secondActualLogLines);
        }
Example 3
        /// <summary>Runs an allocating benchmark in-process with the memory diagnoser and expects non-zero allocated bytes.</summary>
        public void InProcessBenchmarkAllCasesDiagnoserTest()
        {
            var logger = new OutputLogger(Output);
            var config = new ManualConfig()
                         .With(Job.Default.With(InProcessToolchain.Instance))
                         .With(MemoryDiagnoser.Default)
                         .With(logger)
                         .With(DefaultColumnProviders.Instance);

            try
            {
                BenchmarkAllCases.Counter = 0;

                var summary = CanExecute<BenchmarkAllocates>(config);

                // Capture the log once instead of calling GetLog() a second time.
                var testLog = logger.GetLog();
                Assert.Contains("// Benchmark: BenchmarkAllocates.Allocates:", testLog);
                Assert.DoesNotContain("No benchmarks found", testLog);

                // The memory diagnoser must have observed at least one allocation.
                Assert.True(summary.Reports.Sum(r => r.GcStats.AllocatedBytes) > 0);
            }
            finally
            {
                // Always reset the shared counter so other tests start clean.
                BenchmarkAllCases.Counter = 0;
            }
        }
Example 4
        /// <summary>Verifies that SingleRun and Throughput modes both execute every benchmark.</summary>
        public void ModesAreSupported()
        {
            var logger = new OutputLogger(Output);
            var config = ManualConfig.CreateEmpty()
                         .With(DefaultConfig.Instance.GetColumns().ToArray())
                         .With(logger)
                         .With(Job.Dry.With(Mode.SingleRun))
                         .With(Job.Dry.With(Mode.Throughput));

            var results = CanExecute<ModeBenchmarks>(config);

            // Two modes x two benchmark methods = four benchmark cases.
            Assert.Equal(4, results.Benchmarks.Count());

            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.SingleRun && b.Target.Method.Name == "BenchmarkWithVoid"));
            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.SingleRun && b.Target.Method.Name == "BenchmarkWithReturnValue"));

            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.Throughput && b.Target.Method.Name == "BenchmarkWithVoid"));
            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.Throughput && b.Target.Method.Name == "BenchmarkWithReturnValue"));

            // Capture the log once instead of calling GetLog() a second time below.
            var testLog = logger.GetLog();

            Assert.Contains("// ### Benchmark with void called ###", testLog);
            Assert.Contains("// ### Benchmark with return value called ###", testLog);
            Assert.DoesNotContain("No benchmarks found", testLog);
        }
        /// <summary>Params on a property with a public setter produce one run per declared value and never the type default.</summary>
        public void ParamsSupportPropertyWithPublicSetter()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<ParamsTestProperty>(config);

            // Read the log once instead of calling GetLog() on every assertion.
            var testLog = logger.GetLog();
            foreach (var param in new[] { 1, 2 })
            {
                Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testLog);
            }
            Assert.DoesNotContain($"// ### New Parameter {default(int)} ###" + Environment.NewLine, testLog);
        }
        /// <summary>Params on a static field produce one run per declared value and never the default (0).</summary>
        public void Test()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<ParamsTestStaticField>(config);

            // Read the log once instead of calling GetLog() on every assertion.
            var testLog = logger.GetLog();
            foreach (var param in new[] { 1, 2 })
            {
                Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testLog);
            }
            // No interpolation holes here, so a plain literal suffices.
            Assert.DoesNotContain("// ### New Parameter 0 ###" + Environment.NewLine, testLog);
        }
Example 7
        /// <summary>Benchmarks declared in nested (including generic) classes are discovered and run.</summary>
        public void InnerClassesAreSupported()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<Inner>(config);

            // Capture the log once instead of calling GetLog() a second time below.
            var testLog = logger.GetLog();

            Assert.Contains("// ### BenchmarkInnerClass method called ###" + Environment.NewLine, testLog);
            Assert.Contains("// ### BenchmarkGenericInnerClass method called ###" + Environment.NewLine, testLog);
            Assert.DoesNotContain("No benchmarks found", testLog);
        }
        /// <summary>The in-process toolchain must emit the same IL as the Roslyn toolchain for the given benchmark type.</summary>
        public void InProcessBenchmarkEmitsSameIL(Type benchmarkType)
        {
            var logger = new OutputLogger(Output);
            var config = CreateInProcessAndRoslynConfig(logger);

            var summary = CanExecute(benchmarkType, config);

            DiffEmit(summary);

            // Capture the log once instead of calling GetLog() a second time below.
            string testLog = logger.GetLog();

            Assert.Contains(benchmarkType.Name, testLog);
            Assert.DoesNotContain("No benchmarks found", testLog);
        }
Example 9
        /// <summary>Each serialization benchmark type must report its library/type pair in the captured log.</summary>
        public void GenericClassesAreSupported()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<FlatClassBenchmark>(config);
            Assert.Contains(
                $"// ### Benchmark: SerializationLibrary1, Type: {typeof(FlatClassBenchmark).Name} ###",
                logger.GetLog());

            // Reset the captured log before running the second benchmark type.
            logger.ClearLog();

            CanExecute<DoubleArrayBenchmark>(config);
            Assert.Contains(
                $"// ### Benchmark: SerializationLibrary2, Type: {typeof(DoubleArrayBenchmark).Name} ###",
                logger.GetLog());
        }
        // Smoke-tests the performance-unit-test helpers: runs a slow and a fast benchmark
        // and checks both relative ordering and absolute timing windows.
        public void Test()
        {
            var logger = new OutputLogger(output);
            var config = DefaultConfig.Instance.With(logger);
            var summary = BenchmarkRunner.Run<PerformanceUnitTest>(config);

            // Sanity checks, to be sure that the different benchmarks actually run
            var testOutput = logger.GetLog();
            Assert.Contains("// ### Slow Benchmark called ###" + Environment.NewLine, testOutput);
            Assert.Contains("// ### Fast Benchmark called ###" + Environment.NewLine, testOutput);

            // Check that slow benchmark is actually slower than the fast benchmark!
            var slowBenchmarkRun = summary.GetRunsFor<PerformanceUnitTest>(r => r.SlowBenchmark()).First();
            var fastBenchmarkRun = summary.GetRunsFor<PerformanceUnitTest>(r => r.FastBenchmark()).First();
            Assert.True(slowBenchmarkRun.GetAverageNanoseconds() > fastBenchmarkRun.GetAverageNanoseconds(),
                        string.Format("Expected SlowBenchmark: {0:N2} ns to be MORE than FastBenchmark: {1:N2} ns",
                                      slowBenchmarkRun.GetAverageNanoseconds(), fastBenchmarkRun.GetAverageNanoseconds()));
            Assert.True(slowBenchmarkRun.GetOpsPerSecond() < fastBenchmarkRun.GetOpsPerSecond(),
                        string.Format("Expected SlowBenchmark: {0:N2} Ops to be LESS than FastBenchmark: {1:N2} Ops",
                                      slowBenchmarkRun.GetOpsPerSecond(), fastBenchmarkRun.GetOpsPerSecond()));

            // Whilst we're at it, let's do more specific Asserts as we know what the elapsed time should be
            // (presumably the benchmarks sleep/spin ~100 ms and ~15 ms — TODO confirm against PerformanceUnitTest).
            var slowBenchmarkReport = summary.GetReportFor<PerformanceUnitTest>(r => r.SlowBenchmark());
            var fastBenchmarkReport = summary.GetReportFor<PerformanceUnitTest>(r => r.FastBenchmark());
            foreach (var slowRun in slowBenchmarkReport.GetResultRuns())
                Assert.InRange(slowRun.GetAverageNanoseconds() / 1000.0 / 1000.0, low: 98, high: 102);
            foreach (var fastRun in fastBenchmarkReport.GetResultRuns())
                Assert.InRange(fastRun.GetAverageNanoseconds() / 1000.0 / 1000.0, low: 14, high: 17);
        }
        /// <summary>Targeted [GlobalCleanup] methods each run after their own benchmark.</summary>
        public void GlobalCleanupTargetSpecificMethodTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<GlobalCleanupAttributeTargetBenchmarks>(config);

            string log = logger.GetLog();

            // Asserts that both markers were logged and the benchmark preceded its cleanup.
            // (The original omitted the Contains check for the baseline cleanup marker,
            // unlike the First/Second groups — fixed here for consistency.)
            void AssertCleanupRunsAfterBenchmark(string benchmarkCalled, string cleanupCalled)
            {
                string benchmarkLine = benchmarkCalled + Environment.NewLine;
                string cleanupLine = cleanupCalled + Environment.NewLine;

                Assert.Contains(benchmarkLine, log);
                Assert.Contains(cleanupLine, log);
                Assert.True(log.IndexOf(benchmarkLine) < log.IndexOf(cleanupLine));
            }

            AssertCleanupRunsAfterBenchmark(BaselineBenchmarkCalled, BaselineGlobalCleanupCalled);
            AssertCleanupRunsAfterBenchmark(FirstBenchmarkCalled, FirstGlobalCleanupCalled);
            AssertCleanupRunsAfterBenchmark(SecondBenchmarkCalled, SecondGlobalCleanupCalled);
        }
Example 12
        /// <summary>Each serialization benchmark reports its library/type pair in the log when run via BenchmarkRunner.</summary>
        public void Test()
        {
            var logger = new OutputLogger(output);
            var config = DefaultConfig.Instance.With(logger);

            BenchmarkRunner.Run<FlatClassBenchmark>(config);
            Assert.Contains(
                $"// ### Benchmark: SerializationLibrary1, Type: {typeof(FlatClassBenchmark).Name} ###",
                logger.GetLog());

            // Reset the captured log before running the second benchmark type.
            logger.ClearLog();

            BenchmarkRunner.Run<DoubleArrayBenchmark>(config);
            Assert.Contains(
                $"// ### Benchmark: SerializationLibrary2, Type: {typeof(DoubleArrayBenchmark).Name} ###",
                logger.GetLog());
        }
Example 13
        /// <summary>Runs TestBenchmark under the given runtime/jit/platform and expects the text in the log.</summary>
        private void Verify(Runtime runtime, Jit jit, Platform platform, string expectedText)
        {
            var logger = new OutputLogger(Output);
            var config = new PlatformConfig(runtime, jit, platform)
                             .With(logger)
                             .With(DefaultColumnProviders.Instance);

            var benchmarkRunInfo = BenchmarkConverter.TypeToBenchmarks(typeof(TestBenchmark), config);
            BenchmarkRunner.Run(new[] { benchmarkRunInfo });

            Assert.Contains(expectedText, logger.GetLog());
        }
        /// <summary>A benchmark that throws must still surface its exception message in the log.</summary>
        public void ExceptionMessageIsNotLost()
        {
            var outputLogger = new OutputLogger(Output);
            var config = CreateSimpleConfig(outputLogger);

            // Full validation is skipped because this run is expected to fail.
            CanExecute<ThrowingException>(config, fullValidation: false);

            Assert.Contains(ErrorMessage, outputLogger.GetLog());
        }
        /// <summary>Runs the benchmark for the given platform and asserts the failure text never appears.</summary>
        private void Verify(Platform platform, Type benchmark, string failureText)
        {
            // The logger mirrors everything into the TestRunner log.
            var outputLogger = new OutputLogger(Output);
            var config = new PlatformConfig(platform)
                             .With(outputLogger)
                             .With(DefaultColumnProviders.Instance);

            CanExecute(benchmark, config);

            var log = outputLogger.GetLog();
            Assert.DoesNotContain(failureText, log);
            Assert.DoesNotContain(BenchmarkNotFound, log);
        }
Example 16
        /// <summary>With a cold-start strategy the benchmark body runs exactly once.</summary>
        public void ColdStart()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<ColdStartBench>(config);

            var log = logger.GetLog();

            // Exactly one invocation: counter value 1 is logged, 2 is not.
            Assert.Contains($"{CounterPrefix}1", log);
            Assert.DoesNotContain($"{CounterPrefix}2", log);
        }
        /// <summary>The cleanup method runs, and only after the benchmark body.</summary>
        public void CleanupMethodRunsTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<CleanupAttributeBenchmarks>(config);

            var log = logger.GetLog();
            var cleanupLine = CleanupCalled + Environment.NewLine;
            var benchmarkLine = BenchmarkCalled + Environment.NewLine;

            Assert.Contains(cleanupLine, log);
            // Cleanup must be logged after the benchmark invocation.
            Assert.True(log.IndexOf(cleanupLine) > log.IndexOf(benchmarkLine));
        }
        /// <summary>Runs the benchmark for the given platform and asserts the failure text never appears.</summary>
        private void Verify(Platform platform, Type benchmark, string failureText)
        {
            var outputLogger = new OutputLogger(Output); // mirrors output into the TestRunner log
            var config = new PlatformConfig(platform)
                                .With(outputLogger)
                                .With(DefaultConfig.Instance.GetColumns().ToArray());

            CanExecute(benchmark, config);

            var log = outputLogger.GetLog();
            Assert.DoesNotContain(failureText, log);
            Assert.DoesNotContain(BenchmarkNotFound, log);
        }
Example 19
        /// <summary>The [GlobalCleanup] method runs, and only after the benchmark body.</summary>
        public void GlobalCleanupMethodRunsTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<GlobalCleanupAttributeBenchmarks>(config);

            string log = logger.GetLog();

            // Build each marker line once; use unqualified Environment.NewLine for
            // consistency with the rest of the file.
            string cleanupLine = GlobalCleanupCalled + Environment.NewLine;
            string benchmarkLine = BenchmarkCalled + Environment.NewLine;

            // Assert presence of both markers first so an ordering failure is unambiguous
            // (IndexOf returns -1 for a missing marker, which would mask the real problem).
            Assert.Contains(benchmarkLine, log);
            Assert.Contains(cleanupLine, log);
            Assert.True(log.IndexOf(cleanupLine) > log.IndexOf(benchmarkLine));
        }
        /// <summary>Runs the benchmark on a Dry job for the given platform and asserts the failure text never appears.</summary>
        private void Verify(Platform platform, Type benchmark, string failureText)
        {
            // The logger guarantees we get output in the TestRunner log.
            var outputLogger = new OutputLogger(Output);

            var config = ManualConfig.CreateEmpty()
                         .With(Job.Dry.With(platform))
                         .With(outputLogger);

            CanExecute(benchmark, config);

            var log = outputLogger.GetLog();

            Assert.DoesNotContain(failureText, log);
            Assert.DoesNotContain(BenchmarkNotFound, log);
        }
Example 21
        /// <summary>In-process benchmarks built with DelegateCombine codegen run and invoke the expected number of times.</summary>
        public void InProcessBenchmarkAllCasesDelegateCombineSupported()
        {
            var logger = new OutputLogger(Output);
            var config = CreateInProcessConfig(BenchmarkActionCodegen.DelegateCombine, logger);

            try
            {
                BenchmarkAllCases.Counter = 0;

                var summary = CanExecute<BenchmarkAllCases>(config);

                // Capture the log once instead of calling GetLog() a second time.
                var testLog = logger.GetLog();
                Assert.Contains("// Benchmark: BenchmarkAllCases.InvokeOnceVoid:", testLog);
                Assert.DoesNotContain("No benchmarks found", testLog);

                // Operations + GlobalSetup + GlobalCleanup per measurement.
                // Explicit long for consistency with the sibling ReflectionEmit test.
                long expectedCount = summary.Reports.SelectMany(r => r.AllMeasurements).Sum(m => m.Operations + 2);
                Assert.Equal(expectedCount, BenchmarkAllCases.Counter);
            }
            finally
            {
                // Always reset the shared counter so other tests start clean.
                BenchmarkAllCases.Counter = 0;
            }
        }
        /// <summary>In-process benchmarks built with ReflectionEmit codegen run and invoke the expected number of times.</summary>
        public void InProcessBenchmarkSimpleCasesReflectionEmitSupported()
        {
            var logger = new OutputLogger(Output);
            var config = CreateInProcessConfig(logger);

            try
            {
                BenchmarkAllCases.Counter = 0;

                var summary = CanExecute<BenchmarkAllCases>(config);

                // Capture the log once instead of calling GetLog() a second time.
                string testLog = logger.GetLog();
                Assert.Contains("// Benchmark: BenchmarkAllCases.InvokeOnceVoid:", testLog);
                Assert.DoesNotContain("No benchmarks found", testLog);

                // Operations + GlobalSetup + GlobalCleanup per measurement.
                long expectedCount = summary.Reports.SelectMany(r => r.AllMeasurements).Sum(m => m.Operations + 2);
                Assert.Equal(expectedCount, BenchmarkAllCases.Counter);
            }
            finally
            {
                // Always reset the shared counter so other tests start clean.
                BenchmarkAllCases.Counter = 0;
            }
        }
Example 23
        /// <summary>Verifies that SingleRun and Throughput modes both execute every benchmark.</summary>
        public void ModesAreSupported()
        {
            var logger = new OutputLogger(Output);
            var config = ManualConfig.CreateEmpty()
                .With(DefaultConfig.Instance.GetColumns().ToArray())
                .With(logger)
                .With(Job.Dry.With(Mode.SingleRun))
                .With(Job.Dry.With(Mode.Throughput));

            var results = CanExecute<ModeBenchmarks>(config);

            // Two modes x two benchmark methods = four benchmark cases.
            Assert.Equal(4, results.Benchmarks.Count());

            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.SingleRun && b.Target.Method.Name == "BenchmarkWithVoid"));
            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.SingleRun && b.Target.Method.Name == "BenchmarkWithReturnValue"));

            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.Throughput && b.Target.Method.Name == "BenchmarkWithVoid"));
            Assert.Equal(1, results.Benchmarks.Count(b => b.Job.Mode == Mode.Throughput && b.Target.Method.Name == "BenchmarkWithReturnValue"));

            // Capture the log once instead of calling GetLog() a second time below.
            var testLog = logger.GetLog();
            Assert.Contains("// ### Benchmark with void called ###", testLog);
            Assert.Contains("// ### Benchmark with return value called ###", testLog);
            Assert.DoesNotContain("No benchmarks found", testLog);
        }
Example 24
        /// <summary>All async setup/cleanup hooks run and log lines in the expected order.</summary>
        public void AllSetupAndCleanupMethodRunsAsyncTest()
        {
            var logger  = new OutputLogger(Output);
            var miniJob = Job.Default.WithStrategy(RunStrategy.Monitoring)
                                     .WithWarmupCount(2)
                                     .WithIterationCount(3)
                                     .WithInvocationCount(1)
                                     .WithUnrollFactor(1)
                                     .WithId("MiniJob");
            var config  = CreateSimpleConfig(logger, miniJob);

            CanExecute<AllSetupAndCleanupAttributeBenchmarksAsync>(config);

            var actualLogLines = logger.GetLog()
                                       .Split('\r', '\n')
                                       .Where(line => line.StartsWith(Prefix))
                                       .ToArray();

            // Echo the filtered lines to the test output for easier debugging.
            foreach (var line in actualLogLines)
            {
                Output.WriteLine(line);
            }

            Assert.Equal(expectedLogLines, actualLogLines);
        }
Example 25
        // Smoke-tests the performance-unit-test helpers: runs a slow and a fast benchmark
        // and checks both relative ordering and absolute timing windows.
        public void Test()
        {
            var logger  = new OutputLogger(output);
            var config  = DefaultConfig.Instance.With(logger);
            var summary = BenchmarkRunner.Run <PerformanceUnitTest>(config);

            // Sanity checks, to be sure that the different benchmarks actually run
            var testOutput = logger.GetLog();

            Assert.Contains("// ### Slow Benchmark called ###" + Environment.NewLine, testOutput);
            Assert.Contains("// ### Fast Benchmark called ###" + Environment.NewLine, testOutput);

            // Check that slow benchmark is actually slower than the fast benchmark!
            var slowBenchmarkRun = summary.GetRunsFor <PerformanceUnitTest>(r => r.SlowBenchmark()).First();
            var fastBenchmarkRun = summary.GetRunsFor <PerformanceUnitTest>(r => r.FastBenchmark()).First();

            Assert.True(slowBenchmarkRun.GetAverageNanoseconds() > fastBenchmarkRun.GetAverageNanoseconds(),
                        string.Format("Expected SlowBenchmark: {0:N2} ns to be MORE than FastBenchmark: {1:N2} ns",
                                      slowBenchmarkRun.GetAverageNanoseconds(), fastBenchmarkRun.GetAverageNanoseconds()));
            Assert.True(slowBenchmarkRun.GetOpsPerSecond() < fastBenchmarkRun.GetOpsPerSecond(),
                        string.Format("Expected SlowBenchmark: {0:N2} Ops to be LESS than FastBenchmark: {1:N2} Ops",
                                      slowBenchmarkRun.GetOpsPerSecond(), fastBenchmarkRun.GetOpsPerSecond()));

            // Whilst we're at it, let's do more specific Asserts as we know what the elapsed time should be
            // (presumably the benchmarks sleep/spin ~100 ms and ~15 ms — TODO confirm against PerformanceUnitTest).
            var slowBenchmarkReport = summary.GetReportFor <PerformanceUnitTest>(r => r.SlowBenchmark());
            var fastBenchmarkReport = summary.GetReportFor <PerformanceUnitTest>(r => r.FastBenchmark());

            foreach (var slowRun in slowBenchmarkReport.GetResultRuns())
            {
                Assert.InRange(slowRun.GetAverageNanoseconds() / 1000.0 / 1000.0, low: 98, high: 102);
            }
            foreach (var fastRun in fastBenchmarkReport.GetResultRuns())
            {
                Assert.InRange(fastRun.GetAverageNanoseconds() / 1000.0 / 1000.0, low: 14, high: 17);
            }
        }
Example 26
        /// <summary>F# enum params produce one run per declared value and never the undeclared member.</summary>
        public void ParamsSupportFSharpEnums()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<EnumParamsTest>(config);

            // Read the log once instead of calling GetLog() on every assertion.
            var testLog = logger.GetLog();
            foreach (var param in new[] { TestEnum.A, TestEnum.B })
            {
                Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testLog);
            }
            Assert.DoesNotContain($"// ### New Parameter {TestEnum.C} ###" + Environment.NewLine, testLog);
        }
Example 27
        /// <summary>Params on a property with a public setter produce one run per declared value and never the type default.</summary>
        public void ParamsSupportPropertyWithPublicSetter()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<ParamsTestProperty>(config);

            // Read the log once instead of calling GetLog() on every assertion.
            var testLog = logger.GetLog();
            foreach (var param in new[] { 1, 2 })
            {
                Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testLog);
            }
            Assert.DoesNotContain($"// ### New Parameter {default(int)} ###" + Environment.NewLine, testLog);
        }
        /// <summary>Params on a static field produce one run per declared value and never the default (0).</summary>
        public void Test()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger);

            CanExecute<ParamsTestStaticField>(config);

            // Read the log once instead of calling GetLog() on every assertion.
            var testLog = logger.GetLog();
            foreach (var param in new[] { 1, 2 })
            {
                Assert.Contains($"// ### New Parameter {param} ###" + Environment.NewLine, testLog);
            }
            // No interpolation holes here, so a plain literal suffices.
            Assert.DoesNotContain("// ### New Parameter 0 ###" + Environment.NewLine, testLog);
        }