private Assembly[] LoadReferencedAssemblies()
        {
            var assemblies = new List<Assembly>()
            {
                Assembly
            };

#if DEBUG
            _trace.WriteLine($"[NetCoreAssemblyRuntimeLoader][LoadReferencedAssemblies] Loading references for [{Assembly}]");
#endif
            foreach (var assemblyName in Assembly.GetReferencedAssemblies())
            {
                try
                {
#if DEBUG
                    _trace.WriteLine($"[NetCoreAssemblyRuntimeLoader][LoadReferencedAssemblies] Attempting to load [{assemblyName}]");
#endif
                    assemblies.Add(_loadContext.LoadFromAssemblyName(assemblyName));
                }
                catch (Exception ex)
                {
                    // swallow the failure; a reference that cannot be loaded is non-fatal here
#if DEBUG
                    _trace.Error(ex, $"[NetCoreAssemblyRuntimeLoader][LoadReferencedAssemblies] Failed to load [{assemblyName}]");
#endif
                }
            }

            return assemblies.ToArray();
        }
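
For reference, here is a minimal standalone sketch of the same pattern using AssemblyLoadContext from System.Runtime.Loader; the type and member names below are illustrative and not taken from the NBench source:

using System;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.Loader;

static class ReferenceLoaderSketch
{
    // Load an assembly from disk, then attempt to load each of its direct references.
    // Note: LoadFromAssemblyPath requires an absolute path.
    public static Assembly[] LoadWithReferences(string absolutePath)
    {
        var context = AssemblyLoadContext.Default;
        var root = context.LoadFromAssemblyPath(absolutePath);
        var loaded = new List<Assembly> { root };

        foreach (var name in root.GetReferencedAssemblies())
        {
            try
            {
                loaded.Add(context.LoadFromAssemblyName(name));
            }
            catch (Exception)
            {
                // as in the method above, a reference that fails to resolve is non-fatal
            }
        }

        return loaded.ToArray();
    }
}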
Example #2
        /// <summary>
        /// Executes the tests
        /// </summary>
        /// <returns>A <see cref="TestRunnerResult"/> indicating whether all tests passed.</returns>
        public TestRunnerResult Execute()
        {
            // Perform core / thread priority optimizations when running in single-threaded mode,
            // but skip them if the user has indicated they will run multi-threaded benchmarks
            SetProcessPriority(_package.Concurrent);

            IBenchmarkOutput output = CreateOutput();
            var discovery           = new ReflectionDiscovery(output);
            var result = new TestRunnerResult()
            {
                AllTestsPassed = true
            };

            try
            {
                foreach (var testFile in _package.Files)
                {
                    var assembly = AssemblyRuntimeLoader.LoadAssembly(testFile);

                    var benchmarks = discovery.FindBenchmarks(assembly);

                    foreach (var benchmark in benchmarks)
                    {
                        // verify if the benchmark should be included/excluded from the list of benchmarks to be run
                        if (_package.ShouldRunBenchmark(benchmark.BenchmarkName))
                        {
                            output.WriteLine($"------------ STARTING {benchmark.BenchmarkName} ---------- ");
                            benchmark.Run();
                            benchmark.Finish();

                            // if one assert fails, all fail
                            result.AllTestsPassed = result.AllTestsPassed && benchmark.AllAssertsPassed;
                            output.WriteLine($"------------ FINISHED {benchmark.BenchmarkName} ---------- ");
                            result.ExecutedTestsCount++;
                        }
                        else
                        {
                            output.WriteLine($"------------ NOTRUN {benchmark.BenchmarkName} ---------- ");
                            result.IgnoredTestsCount++;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                output.Error(ex, "Error while executing the tests.");
                result.AllTestsPassed = false;
            }

            return result;
        }
        private static bool MethodHasValidBenchmark(MethodInfo x)
        {
            var hasPerformanceBenchmarkAttribute  = x.IsDefined(PerformanceBenchmarkAttributeType, true);
            var hasAtLeastOneMeasurementAttribute = x.IsDefined(MeasurementAttributeType, true);
            var skipReason = (x.GetCustomAttribute(PerformanceBenchmarkAttributeType) as
                              PerfBenchmarkAttribute)?.Skip;
            var benchmarkIsSkipped = hasPerformanceBenchmarkAttribute &&
                                     !String.IsNullOrEmpty(skipReason);

            // the commented-out code below is a starting point for adding interface support
            //var interfaceMethod =
            //   (from @interface in x.DeclaringType.GetInterfaces()
            //    let map = x.DeclaringType.GetInterfaceMap(@interface)
            //    let index = Array.IndexOf(map.TargetMethods, x)
            //    where index >= 0
            //    select map.InterfaceMethods[index]).FirstOrDefault();
            //var hasPerformanceBenchmarkAttributeOnInterface = interfaceMethod?.IsDefined(PerformanceBenchmarkAttributeType, true);


            /*
             * If the user declared a PerformanceBenchmark attribute but never added any
             * Measurement attributes, log a warning here.
             */
            if (hasPerformanceBenchmarkAttribute && !hasAtLeastOneMeasurementAttribute)
            {
                ReflectionOutput.Warning($"{x.DeclaringType?.Name}+{x.Name} has a declared PerformanceBenchmarkAttribute but no declared measurements. Skipping...");
            }
            else if (benchmarkIsSkipped)
            {
                ReflectionOutput.WriteLine($"Skipping {x.DeclaringType?.Name}+{x.Name}. Reason: {skipReason}.");
            }

            return hasPerformanceBenchmarkAttribute && hasAtLeastOneMeasurementAttribute && !benchmarkIsSkipped;
        }
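
The check above boils down to straightforward attribute reflection. Below is a compact sketch of the same filter; the attribute declarations are stand-ins for illustration (NBench ships its own PerfBenchmark and Measurement attributes):

using System;
using System.Linq;
using System.Reflection;

// Stand-in attribute declarations, for illustration only.
[AttributeUsage(AttributeTargets.Method)]
sealed class PerfBenchmarkAttribute : Attribute
{
    public string Skip { get; set; }
}

[AttributeUsage(AttributeTargets.Method, AllowMultiple = true)]
sealed class MeasurementAttribute : Attribute { }

static class DiscoverySketch
{
    // A method is a runnable benchmark when it carries a PerfBenchmark attribute,
    // at least one Measurement attribute, and no Skip reason.
    public static MethodInfo[] FindRunnableBenchmarks(Assembly assembly) =>
        assembly.GetTypes()
                .SelectMany(t => t.GetMethods())
                .Where(m => m.IsDefined(typeof(PerfBenchmarkAttribute), inherit: true) &&
                            m.IsDefined(typeof(MeasurementAttribute), inherit: true) &&
                            string.IsNullOrEmpty(m.GetCustomAttribute<PerfBenchmarkAttribute>()?.Skip))
                .ToArray();
}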
Example #4
        /// <summary>
        /// NBench Runner takes the following <paramref name="args"/>:
        ///
        /// C:\> NBench.Runner.exe [assembly name] [output-directory={dir-path}]
        ///
        /// </summary>
        /// <param name="args">The command-line arguments.</param>
        /// <returns>0 if all asserts passed; -1 otherwise.</returns>
        static int Main(string[] args)
        {
            Output    = new CompositeBenchmarkOutput(new ConsoleBenchmarkOutput(), new MarkdownBenchmarkOutput(CommandLine.GetProperty("output-directory")));
            Discovery = new ReflectionDiscovery(Output);
            string assemblyPath = Path.GetFullPath(args[0]);

            // TODO: See issue https://github.com/petabridge/NBench/issues/3
            var assembly = AssemblyRuntimeLoader.LoadAssembly(assemblyPath);


            /*
             * Set processor affinity
             */
            Process currentProcess = Process.GetCurrentProcess();

            currentProcess.ProcessorAffinity = new IntPtr(2); // pin to the second logical processor (affinity mask 0b10)

            /*
             * Set priority
             */
            currentProcess.PriorityClass = ProcessPriorityClass.High;
            Thread.CurrentThread.Priority = ThreadPriority.Highest;


            var  benchmarks        = Discovery.FindBenchmarks(assembly);
            bool anyAssertFailures = false;

            foreach (var benchmark in benchmarks)
            {
                Output.WriteLine($"------------ STARTING {benchmark.BenchmarkName} ---------- ");
                benchmark.Run();
                benchmark.Finish();

                // if one assert fails, all fail
                anyAssertFailures = anyAssertFailures || !benchmark.AllAssertsPassed;
                Output.WriteLine($"------------ FINISHED {benchmark.BenchmarkName} ---------- ");
            }
            return anyAssertFailures ? -1 : 0;
        }
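
Note that ProcessorAffinity is a bitmask in which bit i enables logical processor i, so new IntPtr(2) pins the process to the second processor rather than allowing either of the first two. A small sketch for clarity (the class and method names are illustrative):

using System;
using System.Diagnostics;

static class AffinitySketch
{
    public static void Pin()
    {
        var process = Process.GetCurrentProcess();
        process.ProcessorAffinity = new IntPtr(0b10); // second logical processor only
        // new IntPtr(0b01) would pin to the first processor only
        // new IntPtr(0b11) would allow either of the first two processors
    }
}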
Example #6
 private void WriteMessage(TraceMessage message)
 {
     // all traces go into the standard log; errors and warnings get no special treatment
     _benchmarkOutput.WriteLine(message.ToString());
 }
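
WriteMessage forwards every trace to a single IBenchmarkOutput, which in the Main methods above is a CompositeBenchmarkOutput fanning out to console and markdown writers. A minimal sketch of that composite pattern, with the interface trimmed to one member and illustrative names:

using System;

interface IOutputSketch
{
    void WriteLine(string message);
}

sealed class ConsoleOutputSketch : IOutputSketch
{
    public void WriteLine(string message) => Console.WriteLine(message);
}

sealed class CompositeOutputSketch : IOutputSketch
{
    private readonly IOutputSketch[] _outputs;

    public CompositeOutputSketch(params IOutputSketch[] outputs) => _outputs = outputs;

    // Fan each message out to every configured sink.
    public void WriteLine(string message)
    {
        foreach (var output in _outputs)
            output.WriteLine(message);
    }
}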
Example #7
        /// <summary>
        /// Executes the tests
        /// </summary>
        /// <returns>A <see cref="TestRunnerResult"/> indicating whether all tests passed.</returns>
        public TestRunnerResult Execute()
        {
            // Perform core / thread priority optimizations when running in single-threaded mode,
            // but skip them if the user has indicated they will run multi-threaded benchmarks
            SetProcessPriority(_package.Concurrent);

            // pass in the runner settings so we can include them in benchmark reports
            // also, toggles tracing on or off
            var runnerSettings = new RunnerSettings()
            {
                ConcurrentModeEnabled = _package.Concurrent,
                TracingEnabled        = _package.Tracing
            };

            IBenchmarkOutput output = CreateOutput();


            var discovery = new ReflectionDiscovery(output,
                                                    DefaultBenchmarkAssertionRunner.Instance, // one day we might be able to pass in custom assertion runners, hence why this is here
                                                    runnerSettings);
            var result = new TestRunnerResult()
            {
                AllTestsPassed = true
            };

            try
            {
                foreach (var testFile in _package.Files)
                {
                    output.WriteLine($"Executing Benchmarks in {testFile}");
                    using (var assembly = AssemblyRuntimeLoader.LoadAssembly(testFile, output))
                    {
                        var benchmarks = discovery.FindBenchmarks(assembly.Assembly);

                        foreach (var benchmark in benchmarks)
                        {
                            // verify if the benchmark should be included/excluded from the list of benchmarks to be run
                            if (_package.ShouldRunBenchmark(benchmark.BenchmarkName))
                            {
                                output.StartBenchmark(benchmark.BenchmarkName);
                                benchmark.Run();
                                benchmark.Finish();

                                // if one assert fails, all fail
                                result.AllTestsPassed = result.AllTestsPassed && benchmark.AllAssertsPassed;
                                output.FinishBenchmark(benchmark.BenchmarkName);
                                result.ExecutedTestsCount++;
                            }
                            else
                            {
                                output.SkipBenchmark(benchmark.BenchmarkName);
                                result.IgnoredTestsCount++;
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                output.Error(ex, "Error while executing the tests.");
                result.AllTestsPassed = false;
            }

            return result;
        }
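
Unlike the earlier Execute, this version disposes the loaded assembly in a using block, which suggests a loader built on a collectible AssemblyLoadContext. A plausible sketch of such a wrapper — an assumption for illustration, not the actual NBench implementation:

using System;
using System.Reflection;
using System.Runtime.Loader;

sealed class DisposableAssemblyLoader : IDisposable
{
    private readonly AssemblyLoadContext _context;

    public Assembly Assembly { get; }

    public DisposableAssemblyLoader(string absolutePath)
    {
        // A collectible context can be unloaded once the benchmarks finish,
        // releasing the loaded assembly.
        _context = new AssemblyLoadContext(absolutePath, isCollectible: true);
        Assembly = _context.LoadFromAssemblyPath(absolutePath);
    }

    public void Dispose() => _context.Unload();
}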