public void ReflectionDiscoveryCanFindBestFittingConfiguratorViaReflection(Type measurementType, Type expectedConfiguratorType)
        {
            var allConfigurators = ReflectionDiscovery.LoadAllTypeConfigurators(measurementType.Assembly, _output);
            var actualMatch      = ReflectionDiscovery.FindBestMatchingConfiguratorForMeasurement(measurementType, allConfigurators);

            Assert.Equal(expectedConfiguratorType, actualMatch);
        }
        public void ShouldNotFindCleanupMethod()
        {
            var cleanupMethod = ReflectionDiscovery.GetCleanupMethod(SimpleBenchmarkTypeInfo);

            Assert.Null(cleanupMethod.InvocationMethod);
            Assert.True(cleanupMethod.Skip);
        }
        public void ShouldFindCleanupMethod()
        {
            var cleanupMethod = ReflectionDiscovery.GetCleanupMethod(ComplexBenchmarkTypeInfo);

            Assert.NotNull(cleanupMethod.InvocationMethod);
            Assert.False(cleanupMethod.Skip);
        }
        public void ShouldDetermineIfMethodUsesContext()
        {
            var setupMethod = ReflectionDiscovery.GetSetupMethod(ComplexBenchmarkTypeInfo);
            var usesContext = ReflectionDiscovery.MethodTakesBenchmarkContext(setupMethod.InvocationMethod);

            Assert.True(usesContext);
        }
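
For reference, a minimal sketch (type and member names are illustrative, not taken from the suites above) of the setup signature that MethodTakesBenchmarkContext detects:

        public class ContextAwareBenchmark // hypothetical example class
        {
            [PerfSetup]
            public void Setup(BenchmarkContext context)
            {
                // Accepting a BenchmarkContext parameter is what makes
                // MethodTakesBenchmarkContext return true for this method.
            }

            // A parameterless setup would make it return false:
            // [PerfSetup] public void Setup() { }
        }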
Example #5
        public static IEnumerable Benchmarks()
        {
            var discovery = new ReflectionDiscovery(new ActionBenchmarkOutput(report => { }, results =>
            {
                foreach (var assertion in results.AssertionResults)
                {
                    Assert.True(assertion.Passed, results.BenchmarkName + " " + assertion.Message);
                    Console.WriteLine(assertion.Message);
                }

                foreach (var nameToMetric in results.Data.StatsByMetric)
                {
                    var stats = nameToMetric.Value.Stats;

                    Console.WriteLine($"Metric: {nameToMetric.Key} [Unit: {nameToMetric.Value.Unit}]: ");
                    Console.WriteLine($"Min: {stats.Min}");
                    Console.WriteLine($"Max: {stats.Max}");
                    Console.WriteLine($"Average: {stats.Average}");
                    Console.WriteLine($"Std Err: {stats.StandardError}");
                    Console.WriteLine($"Std Dev: {stats.StandardDeviation}");
                }
            }));

            var benchmarks = discovery.FindBenchmarks(typeof(T)).ToList();

            foreach (var benchmark in benchmarks)
            {
                var name = benchmark.BenchmarkName.Split('+')[1];
                yield return new TestCaseData(benchmark).SetName(name);
            }
        }
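
A hedged usage sketch of how the Benchmarks() factory above is typically consumed; the generic fixture shape and the placeholder type MyBenchmarks are assumptions, not part of the snippet:

        [TestFixture(typeof(MyBenchmarks))] // MyBenchmarks is a hypothetical NBench benchmark class
        public class NBenchSpecs<T>
        {
            [TestCaseSource(nameof(Benchmarks))]
            public void Benchmark(Benchmark benchmark)
            {
                benchmark.Run();    // assertion results are verified in the callback wired up above
                benchmark.Finish();
            }
        }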
Example #6
        public void Should_NOT_write_trace_messages_in_user_spec_to_output_when_runner_has_NOT_enabled_tracing()
        {
            var methodOutput = new List<string>();
            var discovery    = new ReflectionDiscovery(new ActionBenchmarkOutput(writeLineAction: str =>
            {
                methodOutput.Add(str);
            }), DefaultBenchmarkAssertionRunner.Instance, new RunnerSettings()
            {
                TracingEnabled = false
            }); // disabled tracing

            var benchmarks = discovery.FindBenchmarks(typeof(TracingBenchmark)).ToList();

            Assert.Equal(1, benchmarks.Count); // sanity check
            foreach (var benchmark in benchmarks)
            {
                benchmark.Run();
            }

            var setupTraces   = methodOutput.Count(x => x.Contains(SetupTrace));
            var runTraces     = methodOutput.Count(x => x.Contains(RunTrace));
            var cleanupTraces = methodOutput.Count(x => x.Contains(CleanupTrace));

            Assert.Equal(0, setupTraces);
            Assert.Equal(0, runTraces);
            Assert.Equal(0, cleanupTraces);
        }
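
For reference, a rough sketch of the shape TracingBenchmark presumably has; the context.Trace calls follow NBench's tracing API, while the class name, trace constants, and measurement choice are illustrative:

        public class TracingBenchmarkSketch // hypothetical stand-in for TracingBenchmark
        {
            public const string SetupTrace   = "setup-trace";
            public const string RunTrace     = "run-trace";
            public const string CleanupTrace = "cleanup-trace";

            [PerfSetup]
            public void Setup(BenchmarkContext context) => context.Trace.Debug(SetupTrace);

            [PerfBenchmark(TestMode = TestMode.Test)]
            [MemoryMeasurement(MemoryMetric.TotalBytesAllocated)]
            public void Run(BenchmarkContext context) => context.Trace.Info(RunTrace);

            [PerfCleanup]
            public void Cleanup(BenchmarkContext context) => context.Trace.Debug(CleanupTrace);
        }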
Example #7
        public void Setup()
        {
            var benchmarkData = ReflectionDiscovery.CreateBenchmarksForClass(typeof(MemoryAllocationSpec)).First();
            var settings      = ReflectionDiscovery.CreateSettingsForBenchmark(benchmarkData);
            var invoker       = ReflectionDiscovery.CreateInvokerForBenchmark(benchmarkData);

            _testableBenchmark = new Benchmark(settings, invoker, BenchmarkOutput);
        }
        public void ShouldFindSingleBenchmarkMethod()
        {
            var benchmarkMethods = ReflectionDiscovery.CreateBenchmarksForClass(SimpleBenchmarkTypeInfo);

            Assert.Equal(1, benchmarkMethods.Count);
            Assert.True(benchmarkMethods.All(x => x.Setup.Skip));
            Assert.True(benchmarkMethods.All(x => x.Cleanup.Skip));
        }
        public void ShouldFindMultipleBenchmarkMethods()
        {
            var benchmarkMethods = ReflectionDiscovery.CreateBenchmarksForClass(ComplexBenchmarkTypeInfo);

            Assert.Equal(2, benchmarkMethods.Count);
            Assert.True(benchmarkMethods.All(x => !x.Setup.Skip));
            Assert.True(benchmarkMethods.All(x => !x.Cleanup.Skip));
        }
        public void ReflectionDiscoveryShouldGetEmptyConfiguratorWhenNoMatchingConfiguratorIsDefined()
        {
            var discovery = new ReflectionDiscovery(_output);
            var unsupportedMeasurement = typeof(UnsupportedMeasurementAttribute);

            var actualConfiguratorType = discovery.GetConfiguratorTypeForMeasurement(unsupportedMeasurement);

            Assert.Equal(MeasurementConfigurator.EmptyConfiguratorType, actualConfiguratorType);
        }
        public void ReflectionDiscoveryShouldFindAllBuiltInConfigurators(Type measurementType,
                                                                         Type expectedConfiguratorType)
        {
            var discovery = new ReflectionDiscovery(_output);

            // limit our search to the declaring assembly
            var actualConfiguratorType = discovery.GetConfiguratorTypeForMeasurement(measurementType);

            Assert.Equal(expectedConfiguratorType, actualConfiguratorType);
        }
        //[InlineData(typeof(ConreteBenchmarkImpl), 1, true, true)] // Unsupported case right now
        public void ShouldCreateBenchmark(Type benchmarkType, int numberOfBenchmarks, bool hasSetup, bool hasCleanup)
        {
            var typeInfo          = benchmarkType.GetTypeInfo();
            var setupMethod       = ReflectionDiscovery.GetSetupMethod(typeInfo);
            var cleanupMethod     = ReflectionDiscovery.GetCleanupMethod(typeInfo);
            var benchmarkMetaData = ReflectionDiscovery.CreateBenchmarksForClass(typeInfo);

            Assert.Equal(!hasSetup, setupMethod.Skip);
            Assert.Equal(!hasCleanup, cleanupMethod.Skip);
            Assert.Equal(numberOfBenchmarks, benchmarkMetaData.Count);
        }
        public void ShouldProduceBenchmarkSettings_Complex()
        {
            var benchmarkMetaData = ReflectionDiscovery.CreateBenchmarksForClass(ComplexBenchmarkTypeInfo);
            var benchmarkSettings = ReflectionDiscovery.CreateSettingsForBenchmark(benchmarkMetaData.First());

            Assert.Equal(TestMode.Test, benchmarkSettings.TestMode);
            Assert.Equal(PerfBenchmarkAttribute.DefaultRunType, benchmarkSettings.RunMode);
            Assert.Equal(0, benchmarkSettings.GcBenchmarks.Count);
            Assert.Equal(2, benchmarkSettings.MemoryBenchmarks.Count);
            Assert.Equal(1, benchmarkSettings.DistinctMemoryBenchmarks.Count);
            Assert.Equal(0, benchmarkSettings.CounterBenchmarks.Count);
        }
        public void ShouldProduceBenchmarkSettings_Complex()
        {
            var discovery         = new ReflectionDiscovery(NoOpBenchmarkOutput.Instance);
            var benchmarkMetaData = ReflectionDiscovery.CreateBenchmarksForClass(ComplexBenchmarkTypeInfo);
            var benchmarkSettings = discovery.CreateSettingsForBenchmark(benchmarkMetaData.First());

            Assert.Equal(TestMode.Test, benchmarkSettings.TestMode);
            Assert.Equal(PerfBenchmarkAttribute.DefaultRunType, benchmarkSettings.RunMode);
            Assert.Equal(0, benchmarkSettings.Measurements.Count(x => x is GcBenchmarkSetting));
            Assert.Equal(2, benchmarkSettings.Measurements.Count(x => x is MemoryBenchmarkSetting));
            Assert.Equal(1, benchmarkSettings.DistinctMeasurements.Count(x => x is MemoryBenchmarkSetting));
            Assert.Equal(0, benchmarkSettings.CounterMeasurements.Count());
        }
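
A hedged sketch, consistent with the counts asserted above, of what the class behind ComplexBenchmarkTypeInfo could look like: a setup, a cleanup, and two benchmark methods, where the first declares two memory settings over the same metric (an assertion attribute is itself a measurement) that collapse into one distinct measurement. All names and thresholds here are illustrative:

        public class ComplexBenchmarkSketch // hypothetical stand-in
        {
            [PerfSetup]
            public void Setup(BenchmarkContext context) { }

            [PerfBenchmark(TestMode = TestMode.Test)]
            [MemoryMeasurement(MemoryMetric.TotalBytesAllocated)]
            [MemoryAssertion(MemoryMetric.TotalBytesAllocated, MustBe.LessThan, 64000.0d)] // second setting, same metric
            public void Benchmark1() { }

            [PerfBenchmark(TestMode = TestMode.Test)]
            [MemoryMeasurement(MemoryMetric.TotalBytesAllocated)]
            public void Benchmark2() { }

            [PerfCleanup]
            public void Cleanup(BenchmarkContext context) { }
        }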
Example #15
        /// <summary>
        /// Executes the tests
        /// </summary>
        /// <returns>True if all tests passed.</returns>
        public TestRunnerResult Execute()
        {
            // Perform core / thread optimizations if we're running in single-threaded mode
            // But not if the user has specified that they're going to be running multi-threaded benchmarks
            SetProcessPriority(_package.Concurrent);

            IBenchmarkOutput output = CreateOutput();
            var discovery           = new ReflectionDiscovery(output);
            var result = new TestRunnerResult()
            {
                AllTestsPassed = true
            };

            try
            {
                foreach (var testFile in _package.Files)
                {
                    var assembly = AssemblyRuntimeLoader.LoadAssembly(testFile);

                    var benchmarks = discovery.FindBenchmarks(assembly);

                    foreach (var benchmark in benchmarks)
                    {
                        // verify if the benchmark should be included/excluded from the list of benchmarks to be run
                        if (_package.ShouldRunBenchmark(benchmark.BenchmarkName))
                        {
                            output.WriteLine($"------------ STARTING {benchmark.BenchmarkName} ---------- ");
                            benchmark.Run();
                            benchmark.Finish();

                            // if one assert fails, all fail
                            result.AllTestsPassed = result.AllTestsPassed && benchmark.AllAssertsPassed;
                            output.WriteLine($"------------ FINISHED {benchmark.BenchmarkName} ---------- ");
                            result.ExecutedTestsCount++;
                        }
                        else
                        {
                            output.WriteLine($"------------ NOTRUN {benchmark.BenchmarkName} ---------- ");
                            result.IgnoredTestsCount++;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                output.Error(ex, "Error while executing the tests.");
                result.AllTestsPassed = false;
            }

            return result;
        }
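
A minimal sketch of driving this runner end to end; the TestPackage construction is an assumption, not taken from the snippet, while the result properties come from the code above:

        public static int Main(string[] args)
        {
            var package = new TestPackage(new[] { "MyBenchmarks.dll" }); // hypothetical package setup
            var runner  = new TestRunner(package);

            var result = runner.Execute();
            Console.WriteLine($"Executed: {result.ExecutedTestsCount}, Ignored: {result.IgnoredTestsCount}");
            return result.AllTestsPassed ? 0 : 1;
        }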
        public static IEnumerable Benchmarks()
        {
            var discovery = new ReflectionDiscovery(new ActionBenchmarkOutput(reports => { }, results =>
            {
                foreach (var assertion in results.AssertionResults)
                {
                    Assert.True(assertion.Passed, results.BenchmarkName + " " + assertion.Message);
                }
            }));

            var benchmarks = discovery.FindBenchmarks(typeof(T)).ToList();

            foreach (var benchmark in benchmarks)
            {
                var name = benchmark.BenchmarkName.Split('+')[1];
                yield return new TestCaseData(benchmark).SetName(name);
            }
        }
Example #17
        public void BugFix153IsFixed()
        {
            var o = new ActionBenchmarkOutput(benchmarkAction: r =>
            {
                var name  = new CounterMetricName(CounterThroughputBenchmark.CounterName);
                var count = r.Data.StatsByMetric[name];
                Assert.True(count.PerSecondStats.Average > 0.0);
            });
            var d          = new ReflectionDiscovery(o, DefaultBenchmarkAssertionRunner.Instance, new RunnerSettings());
            var benchmarks = d.FindBenchmarks(typeof(CounterThroughputBenchmark));

            foreach (var b in benchmarks)
            {
                b.Run();
                b.Finish();
                Assert.True(b.AllAssertsPassed);
            }
        }
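
For reference, a rough sketch of the shape CounterThroughputBenchmark implies; the counter attributes and the context.GetCounter call follow NBench's counter API, while the class name, counter name, and threshold are illustrative:

        public class CounterThroughputBenchmarkSketch // hypothetical stand-in
        {
            public const string CounterName = "MyCounter";
            private Counter _counter;

            [PerfSetup]
            public void Setup(BenchmarkContext context) => _counter = context.GetCounter(CounterName);

            [PerfBenchmark(TestMode = TestMode.Test)]
            [CounterThroughputAssertion(CounterName, MustBe.GreaterThan, 0.0d)]
            public void Run() => _counter.Increment(); // drives PerSecondStats.Average above zero
        }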
Example #18
        /// <summary>
        /// Executes the tests
        /// </summary>
        /// <returns>True if all tests passed.</returns>
        public TestRunnerResult Execute()
        {
            // Perform core / thread optimizations if we're running in single-threaded mode
            // But not if the user has specified that they're going to be running multi-threaded benchmarks
            SetProcessPriority(_package.Concurrent);

            // pass in the runner settings so we can include them in benchmark reports
            // also, toggles tracing on or off
            var runnerSettings = new RunnerSettings()
            {
                ConcurrentModeEnabled = _package.Concurrent,
                TracingEnabled        = _package.Tracing
            };

            IBenchmarkOutput output = CreateOutput();


            var discovery = new ReflectionDiscovery(output,
                                                    DefaultBenchmarkAssertionRunner.Instance, // one day we might be able to pass in custom assertion runners, hence why this is here
                                                    runnerSettings);
            var result = new TestRunnerResult()
            {
                AllTestsPassed = true
            };

            try
            {
                foreach (var testFile in _package.Files)
                {
                    var assembly = AssemblyRuntimeLoader.LoadAssembly(testFile);

                    var benchmarks = discovery.FindBenchmarks(assembly);

                    foreach (var benchmark in benchmarks)
                    {
                        // verify if the benchmark should be included/excluded from the list of benchmarks to be run
                        if (_package.ShouldRunBenchmark(benchmark.BenchmarkName))
                        {
                            output.StartBenchmark(benchmark.BenchmarkName);
                            benchmark.Run();
                            benchmark.Finish();

                            // if one assert fails, all fail
                            result.AllTestsPassed = result.AllTestsPassed && benchmark.AllAssertsPassed;
                            output.FinishBenchmark(benchmark.BenchmarkName);
                            result.ExecutedTestsCount++;
                        }
                        else
                        {
                            output.SkipBenchmark(benchmark.BenchmarkName);
                            result.IgnoredTestsCount++;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                output.Error(ex, "Error while executing the tests.");
                result.AllTestsPassed = false;
            }

            return result;
        }
        private RunSummary RunNBenchTest(object testClassInstance)
        {
            //TODO: It is not strictly required to use a RunSummary at the moment - needs more investigation to see
            //if we can provide more useful information via the standard xUnit mechanism. For now, what we have is sufficient.
            var summary = new RunSummary();

            var discovery = new ReflectionDiscovery(new ActionBenchmarkOutput(report => { }, results =>
            {
                if (results.Data.Exceptions.Any())
                {
                    throw new AggregateException(results.Data.Exceptions);
                }

                WriteTestOutput("");

                if (results.AssertionResults.Count > 0)
                {
                    //TODO: We should determine the accurate elapsed time at this point, to report it in the xUnit runner.
                    summary.Time = (decimal)results.Data.StatsByMetric.Values.First().Runs.First().ElapsedSeconds;

                    foreach (var assertion in results.AssertionResults)
                    {
                        //TODO: Maybe it is possible to bubble this up, and provide the original line number?
                        Assert.True(assertion.Passed, assertion.Message);

                        summary.Total++;
                        if (!assertion.Passed)
                        {
                            summary.Failed++;
                        }
                        WriteTestOutput(assertion.Message);
                        WriteTestOutput("");
                    }
                }
                else
                {
                    WriteTestOutput("No assertions returned.");
                }

                WriteTestOutput("");
                WriteTestOutput("---------- Measurements ----------");
                WriteTestOutput("");

                if (results.Data.StatsByMetric.Count > 0)
                {
                    foreach (var measurement in results.Data.StatsByMetric)
                    {
                        WriteTestOutput("Metric : " + measurement.Key.ToHumanFriendlyString());
                        WriteTestOutput("");
                        WriteTestOutput($"Per Second ( {measurement.Value.Unit} )");

                        WriteTestOutput($"Average         : {measurement.Value.PerSecondStats.Average}");
                        WriteTestOutput($"Max             : {measurement.Value.PerSecondStats.Max}");
                        WriteTestOutput($"Min             : {measurement.Value.PerSecondStats.Min}");
                        WriteTestOutput($"Std. Deviation  : {measurement.Value.PerSecondStats.StandardDeviation}");
                        WriteTestOutput($"Std. Error      : {measurement.Value.PerSecondStats.StandardError}");
                        WriteTestOutput("");

                        WriteTestOutput($"Per Test ( {measurement.Value.Unit} )");
                        WriteTestOutput($"Average         : {measurement.Value.Stats.Average}");
                        WriteTestOutput($"Max             : {measurement.Value.Stats.Max}");
                        WriteTestOutput($"Min             : {measurement.Value.Stats.Min}");
                        WriteTestOutput($"Std. Deviation  : {measurement.Value.Stats.StandardDeviation}");
                        WriteTestOutput($"Std. Error      : {measurement.Value.Stats.StandardError}");

                        WriteTestOutput("");
                        WriteTestOutput("----------");
                        WriteTestOutput("");
                    }
                }
                else
                {
                    WriteTestOutput("No measurements returned.");
                }
            }));

            var testClassType = TestClass;

            //TODO: At the moment this is performing work that is not required, but is pragmatic in that a change is not required to the NBench core.
            var benchmarkMetaData = ReflectionDiscovery.CreateBenchmarksForClass(testClassType).First(b => b.Run.InvocationMethod.Name == TestMethod.Name);

            try
            {
                var invoker =
                    new XUnitReflectionBenchmarkInvoker(benchmarkMetaData, testClassInstance, TestMethodArguments);

                var settings  = discovery.CreateSettingsForBenchmark(benchmarkMetaData);
                var benchmark = new Benchmark(settings, invoker, discovery.Output, discovery.BenchmarkAssertions);

                Benchmark.PrepareForRun();
                benchmark.Run();
                benchmark.Finish();
            }
            catch (ReflectionTypeLoadException ex)
            {
                foreach (var e in ex.LoaderExceptions)
                {
                    WriteTestOutput(e.ToString());
                }

                throw;
            }

            return(summary);
        }
        public void ShouldNotCreateBenchmarkForClassWithOnlySkippedBenchmarkMethods()
        {
            var benchmarkMetaData = ReflectionDiscovery.CreateBenchmarksForClass(SkippedBenchmarksTypeInfo);

            Assert.Equal(0, benchmarkMetaData.Count);
        }
        public void ShouldNotCreateBenchmarkForClassWithNoDeclaredMeasurements()
        {
            var benchmarkMetaData = ReflectionDiscovery.CreateBenchmarksForClass(BenchmarkWithoutMeasurementsTypeInfo);

            Assert.Equal(0, benchmarkMetaData.Count);
        }
 public void ReflectionDiscoveryCanMatchExactConcreteTypes(Type measurementType, Type expectedConfiguratorType, bool matchResult)
 {
     Assert.True(ReflectionDiscovery.ConfiguratorSupportsMeasurement(measurementType, expectedConfiguratorType, true) == matchResult, $"Expected {matchResult} but got {!matchResult}");
 }
 public void ReflectionDiscoveryCanFindMatchingConfiguratorViaReflection(Type measurementType,
                                                                         Type expectedConfiguratorType, bool matchResult)
 {
     Assert.True(ReflectionDiscovery.ConfiguratorSupportsMeasurement(measurementType, expectedConfiguratorType) == matchResult, $"Expected {matchResult} but got {!matchResult}");
 }
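
These two theories are presumably driven by [InlineData] rows pairing measurement attribute types with configurator types; a hedged sketch of what such rows could look like (the configurator type names are assumptions, not confirmed by the snippets):

 // [Theory]
 // [InlineData(typeof(MemoryMeasurementAttribute), typeof(MemoryMeasurementConfigurator), true)]
 // [InlineData(typeof(CounterMeasurementAttribute), typeof(CounterMeasurementConfigurator), true)]
 // [InlineData(typeof(MemoryMeasurementAttribute), typeof(CounterMeasurementConfigurator), false)]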