/// <summary>
/// Runs every competition whose name matches a command-line prefix argument,
/// an index reference ("#&lt;i&gt;"), or the wildcard "*"; if the first argument
/// is an http(s) URL, also runs the remote benchmark it points to.
/// </summary>
/// <param name="args">Raw command-line arguments.</param>
private void RunCompetitions(string[] args)
{
    for (int i = 0; i < Competitions.Length; i++)
    {
        var competition = Competitions[i];
        // Ordinal case-insensitive prefix match replaces the original
        // ToLower()-based comparison, which is culture-sensitive (e.g. the
        // Turkish-I problem) and allocates two throwaway strings per check.
        bool selected =
            args.Any(arg => competition.Name.StartsWith(arg, StringComparison.OrdinalIgnoreCase)) ||
            args.Contains("#" + i) ||
            args.Contains("*");
        if (!selected)
            continue;

        logger.WriteLineHeader("Target competition: " + competition.Name);
        // One log file per competition, alongside console output.
        using (var logStreamWriter = new StreamWriter(competition.Name + ".log"))
        {
            var loggers = new IBenchmarkLogger[] { new BenchmarkConsoleLogger(), new BenchmarkStreamLogger(logStreamWriter) };
            var runner = new BenchmarkRunner(loggers);
            runner.RunCompetition(Activator.CreateInstance(competition), BenchmarkSettings.Parse(args));
        }
        logger.NewLine();
    }

    // URL schemes are machine-readable tokens: compare ordinally, not culturally.
    if (args.Length > 0 &&
        (args[0].StartsWith("http://", StringComparison.Ordinal) ||
         args[0].StartsWith("https://", StringComparison.Ordinal)))
    {
        var url = args[0];
        var uri = new Uri(url);
        // Derive the log name from the file name for file URIs; plain "URL" otherwise.
        var name = uri.IsFile ? Path.GetFileName(uri.LocalPath) : "URL";
        using (var logStreamWriter = new StreamWriter(name + ".log"))
        {
            var loggers = new IBenchmarkLogger[] { new BenchmarkConsoleLogger(), new BenchmarkStreamLogger(logStreamWriter) };
            var runner = new BenchmarkRunner(loggers);
            runner.RunUrl(url, BenchmarkSettings.Parse(args));
        }
    }
}
[InlineData(2, 300)] // keep the values small since there's a real delay involved
public void ShouldComputeMetricsCorrectly(int iterationCount, int millisecondRuntime)
{
    // Per-report check: measured ops/sec should sit within 1.5 of the
    // throughput ceiling implied by the per-iteration delay.
    // Final check: the counter's observed max should equal the iteration count.
    var assertionOutput = new ActionBenchmarkOutput(
        (report, warmup) =>
        {
            if (warmup)
                return;

            var counterResults = report.Metrics[CounterName];
            var projectedThroughput = 1000 / (double)IterationSpeedMs; // roughly the max value of this counter
            var observedDifference = Math.Abs(projectedThroughput - counterResults.MetricValuePerSecond);
            Assert.True(observedDifference <= 1.5d,
                $"delta between expected value and actual measured value should be <= 1.5, was {observedDifference} [{counterResults.MetricValuePerSecond} op /s]. Expected [{projectedThroughput} op /s]");
        },
        results =>
        {
            var counterResults = results.Data.StatsByMetric[CounterName].Stats.Max;
            Assert.Equal(iterationCount, counterResults);
        });

    // Throughput-mode settings covering all three metric families.
    var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
    var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total, Assertion.Empty);
    var memoryBenchmark = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Throughput, iterationCount, millisecondRuntime,
        new[] { gcBenchmark },
        new[] { memoryBenchmark },
        new[] { counterBenchmark });

    new Benchmark(settings, _benchmarkMethods, assertionOutput).Run();
}
public void ShouldSkipWarmupsWhenSpecified(int iterationCount)
{
    // Start below zero to cancel out the pre-warmup pass, which runs
    // unconditionally even when SkipWarmups is set.
    var observedWarmupCount = -1;
    var assertionOutput = new ActionBenchmarkOutput(
        (report, warmup) =>
        {
            if (warmup)
                observedWarmupCount++;
        },
        results => Assert.Equal(1, observedWarmupCount));

    var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
        new List<IBenchmarkSetting> { counterBenchmark },
        new Dictionary<MetricName, MetricsCollectorSelector>
        {
            { counterBenchmark.MetricName, new CounterSelector() }
        })
    {
        SkipWarmups = true
    };

    new Benchmark(settings, _benchmarkMethods, assertionOutput).Run();
}
/// <summary>
/// Expands a competition object into one <see cref="Benchmark"/> per task
/// resolved from each [Benchmark]-attributed method on its type.
/// </summary>
/// <param name="competition">Instance whose public methods are scanned.</param>
/// <param name="defaultSettings">Fallback settings; defaults are created when null.</param>
private static IEnumerable<Benchmark> CompetitionToBenchmarks(object competition, BenchmarkSettings defaultSettings)
{
    defaultSettings = defaultSettings ?? BenchmarkSettings.CreateDefault();

    var targetType = competition.GetType();
    foreach (var methodInfo in targetType.GetMethods())
    {
        var benchmarkAttribute = methodInfo.ResolveAttribute<BenchmarkAttribute>();
        if (benchmarkAttribute == null)
            continue;

        var target = new BenchmarkTarget(targetType, methodInfo, benchmarkAttribute.Description);
        // Fail fast if the method cannot be invoked as a benchmark.
        AssertBenchmarkMethodHasCorrectSignature(methodInfo);
        AssertBenchmarkMethodIsAccessible(methodInfo);
        AssertBenchmarkMethodIsNotDeclaredInGeneric(methodInfo);
        AssertBenchmarkMethodIsNotGeneric(methodInfo);
        foreach (var task in BenchmarkTask.Resolve(methodInfo, defaultSettings))
            yield return new Benchmark(target, task);
    }
}
public void ShouldComputeMetricsCorrectly(int iterationCount)
{
    // Per-iteration check: each measured run should report exactly one counter
    // increment; the aggregate max should therefore equal the iteration count.
    var assertionOutput = new ActionBenchmarkOutput((report, warmup) =>
    {
        if (!warmup)
        {
            var counterResults = report.Metrics[CounterName];
            Assert.Equal(1, counterResults.MetricValue);
        }
    }, results =>
    {
        var counterResults = results.Data.StatsByMetric[CounterName].Stats.Max;
        Assert.Equal(iterationCount, counterResults);
    });

    var counterBenchmark = new CounterBenchmarkSetting(CounterName.CounterName, AssertionType.Total, Assertion.Empty);
    var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.Gen2, AssertionType.Total, Assertion.Empty);
    // NOTE: the original also constructed a MemoryBenchmarkSetting here, but it
    // was never passed to the settings — dead local removed.
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, iterationCount, 1000,
        new List<IBenchmarkSetting>() { gcBenchmark, counterBenchmark },
        new Dictionary<MetricName, MetricsCollectorSelector>()
        {
            { gcBenchmark.MetricName, new GcCollectionsSelector() },
            { counterBenchmark.MetricName, new CounterSelector() }
        });

    var benchmark = new Benchmark(settings, _benchmarkMethods, assertionOutput);
    benchmark.Run();
}
public void Should_build_when_at_least_one_metric_assigned()
{
    // Arrange: one setting per metric family (GC, memory, counter), each with
    // its matching collector selector.
    var counterBenchmark = new CounterBenchmarkSetting("Test", AssertionType.Total, Assertion.Empty);
    var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.Gen2, AssertionType.Total, Assertion.Empty);
    var memoryBenchmark = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, 10, 1000,
        new List<IBenchmarkSetting> { gcBenchmark, memoryBenchmark, counterBenchmark },
        new Dictionary<MetricName, MetricsCollectorSelector>
        {
            { gcBenchmark.MetricName, new GcCollectionsSelector() },
            { counterBenchmark.MetricName, new CounterSelector() },
            { memoryBenchmark.MetricName, new TotalMemorySelector() }
        });

    // Act
    var run = new BenchmarkBuilder(settings).NewRun(WarmupData.PreWarmup);

    // Assert: all three metrics are measured and the counter is registered.
    Assert.Equal(3, run.MeasureCount);
    Assert.Equal(1, run.Counters.Count);
    Assert.True(run.Counters.ContainsKey(counterBenchmark.CounterName));
}
public void ParseIgnoresUnrecognisedArguments()
{
    // "foo" and "bar" are not recognised switches; they must be skipped
    // without affecting parsing of the valid "/calibration-time" switch.
    BenchmarkSettings settings = BenchmarkSettings.Parse(
        "foo",
        "bar",
        "/calibration-time:10");

    // Assert.AreEqual takes (expected, actual); the original call had the
    // arguments reversed, which yields misleading failure messages.
    Assert.AreEqual(TimeSpan.FromSeconds(10), settings.CalibrationTime);
    Assert.AreEqual(BenchmarkSettings.Default.TestTime, settings.TestTime);
}
/// <summary>
/// Captures the results of a full benchmark: the per-run reports, the
/// aggregated per-metric statistics, and any exceptions the runs raised.
/// </summary>
/// <param name="typeName">Name of the benchmarked type; must be non-empty.</param>
/// <param name="settings">Settings the benchmark was executed with.</param>
/// <param name="runs">Individual run reports; must be non-null.</param>
public BenchmarkResults(string typeName, BenchmarkSettings settings, IReadOnlyList<BenchmarkRunReport> runs)
{
    Contract.Requires(!string.IsNullOrEmpty(typeName));
    Contract.Requires(runs != null);
    BenchmarkName = typeName;
    Settings = settings;
    Runs = runs;
    // The original first assigned a throwaway empty dictionary that was
    // immediately overwritten by Aggregate(Runs); the dead assignment is removed.
    StatsByMetric = Aggregate(Runs);
    Exceptions = Runs.SelectMany(r => r.Exceptions).ToList();
}
/// <summary>
/// Runs a benchmark in throughput mode: calibrates a batch size so a batch is
/// long enough to time reliably, measures idle overhead, then performs the
/// configured warmup and target iterations.
/// </summary>
/// <param name="settings">Supplies the warmup and target iteration counts.</param>
/// <param name="operationsPerInvoke">Operations represented by one target-action call.</param>
/// <param name="setupAction">Invoked before each measured batch.</param>
/// <param name="targetAction">The workload being measured.</param>
/// <param name="idleAction">Empty action used to measure harness overhead.</param>
public void Throughput(BenchmarkSettings settings, long operationsPerInvoke, Action setupAction, Action targetAction, Action idleAction)
{
    // Prime (JIT) every delegate once before any timing happens.
    setupAction();
    targetAction();
    idleAction();

    // Pre-warmup: grow invokeCount until one batch takes longer than
    // InvokeTimoutMilliseconds, so timer resolution noise becomes negligible.
    long invokeCount = 1;
    double lastPreWarmupMilliseconds = 0;
    while (true)
    {
        var measurement = MultiInvoke("// Pre-Warmup", setupAction, targetAction, invokeCount, operationsPerInvoke);
        lastPreWarmupMilliseconds = measurement.Milliseconds;
        if (lastPreWarmupMilliseconds > InvokeTimoutMilliseconds)
        {
            break;
        }
        if (lastPreWarmupMilliseconds < 1)
        {
            // Too fast to measure at all: scale up aggressively.
            invokeCount *= InvokeTimoutMilliseconds;
        }
        else
        {
            // Scale proportionally so the next batch should cross the threshold.
            invokeCount *= (long)Math.Ceiling(InvokeTimoutMilliseconds / lastPreWarmupMilliseconds);
        }
    }

    // Sample the idle (empty-action) batch time over a few warmup passes;
    // only the last measurement is kept.
    double idleMilliseconds = 0;
    for (int i = 0; i < Math.Min(3, settings.WarmupIterationCount); i++)
    {
        var measurement = MultiInvoke("// Warmup (idle)", setupAction, idleAction, invokeCount, operationsPerInvoke);
        idleMilliseconds = measurement.Milliseconds;
    }

    // Rescale invokeCount using the net batch time (idle subtracted), clamped
    // to [100, 1000] ms — presumably targeting ~1 second per measured batch.
    invokeCount = invokeCount * 1000 / (long)Math.Round(Math.Min(1000, Math.Max(100, lastPreWarmupMilliseconds - idleMilliseconds)));
    Console.WriteLine("// IterationCount = " + invokeCount);

    // Average the idle tick cost over up to 5 batches; passed to MultiInvoke
    // below so harness overhead can be subtracted from target timings.
    long idleTicks = 0;
    var targetIdleInvokeCount = Math.Min(5, settings.TargetIterationCount);
    for (int i = 0; i < targetIdleInvokeCount; i++)
    {
        var measurement = MultiInvoke("// Target (idle)", setupAction, idleAction, invokeCount, operationsPerInvoke);
        idleTicks += measurement.Ticks;
    }
    idleTicks /= targetIdleInvokeCount;

    // Warmup passes of the real workload (labelled with a leading "//").
    for (int i = 0; i < settings.WarmupIterationCount; i++)
    {
        MultiInvoke("// Warmup " + (i + 1), setupAction, targetAction, invokeCount, operationsPerInvoke, idleTicks);
    }
    // Measured target passes.
    for (int i = 0; i < settings.TargetIterationCount; i++)
    {
        MultiInvoke("Target " + (i + 1), setupAction, targetAction, invokeCount, operationsPerInvoke, idleTicks);
    }
}
/// <summary>
/// Runs a benchmark in single-run mode: one invocation per iteration,
/// warmup iterations first, then the measured target iterations.
/// </summary>
public void SingleRun(BenchmarkSettings settings, long operationsPerInvoke, Action setupAction, Action targetAction, Action idleAction)
{
    for (int iteration = 0; iteration < settings.WarmupIterationCount; iteration++)
    {
        MultiInvoke("// Warmup " + (iteration + 1), setupAction, targetAction, 1, operationsPerInvoke);
    }

    for (int iteration = 0; iteration < settings.TargetIterationCount; iteration++)
    {
        MultiInvoke("Target " + (iteration + 1), setupAction, targetAction, 1, operationsPerInvoke);
    }
}
public void Should_build_when_exactly_one_metric_assigned()
{
    // Arrange: a single counter metric and empty GC/memory setting arrays.
    var counterBenchmark = new CounterBenchmarkSetting("Test", AssertionType.Total, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, 10, 1000,
        new GcBenchmarkSetting[0],
        new MemoryBenchmarkSetting[0],
        new CounterBenchmarkSetting[] { counterBenchmark });

    // Act
    var run = new BenchmarkBuilder(settings).NewRun(WarmupData.PreWarmup);

    // Assert: exactly one measurement and one registered counter.
    Assert.Equal(1, run.MeasureCount);
    Assert.Equal(1, run.Counters.Count);
    Assert.True(run.Counters.ContainsKey(counterBenchmark.CounterName));
}
public void Should_build_when_at_least_one_metric_assigned()
{
    // Arrange: one setting from each metric family via the array-based ctor.
    var counterBenchmark = new CounterBenchmarkSetting("Test", AssertionType.Total, Assertion.Empty);
    var gcBenchmark = new GcBenchmarkSetting(GcMetric.TotalCollections, GcGeneration.AllGc, AssertionType.Total, Assertion.Empty);
    var memoryBenchmark = new MemoryBenchmarkSetting(MemoryMetric.TotalBytesAllocated, Assertion.Empty);
    var settings = new BenchmarkSettings(TestMode.Measurement, RunMode.Iterations, 10, 1000,
        new[] { gcBenchmark },
        new[] { memoryBenchmark },
        new[] { counterBenchmark });

    // Act
    var run = new BenchmarkBuilder(settings).NewRun(WarmupData.PreWarmup);

    // Assert: AllGc expands to one measurement per GC generation, plus the
    // memory and counter measurements.
    Assert.Equal(2 + (SysInfo.Instance.MaxGcGeneration + 1), run.MeasureCount);
    Assert.Equal(1, run.Counters.Count);
    Assert.True(run.Counters.ContainsKey(counterBenchmark.CounterName));
}
/// <summary>
/// Downloads C# benchmark source from <paramref name="url"/>, compiles it
/// in-memory with CodeDOM, and yields a <see cref="Benchmark"/> for every
/// benchmark method discovered in the compiled assembly.
/// </summary>
/// <param name="url">Location of the raw C# source to compile.</param>
/// <param name="defaultSettings">Settings forwarded to CompetitionToBenchmarks.</param>
private static IEnumerable <Benchmark> UrlToBenchmarks(string url, BenchmarkSettings defaultSettings)
{
    string benchmarkContent = String.Empty;
    try
    {
        var webRequest = WebRequest.Create(url);
        using (var response = webRequest.GetResponse())
        using (var content = response.GetResponseStream())
        using (var reader = new StreamReader(content))
            benchmarkContent = reader.ReadToEnd();
        // An empty download is reported and treated as "no benchmarks".
        if (string.IsNullOrWhiteSpace(benchmarkContent))
        {
            Console.WriteLine($"content of '{url}' is empty.");
            yield break;
        }
    }
    catch (Exception e)
    {
        // Best-effort: report the download failure and yield nothing.
        // (yield break is legal inside a try with a catch clause; yield return is not.)
        Console.WriteLine("Exception: " + e.Message);
        yield break;
    }
    // Compile the downloaded source against mscorlib, System.Core and this
    // runner's own assembly; /unsafe permits pointer-using benchmark code.
    var cSharpCodeProvider = new CSharpCodeProvider();
    var compilerParameters = new CompilerParameters(new[] { "mscorlib.dll", "System.Core.dll" })
    {
        CompilerOptions = "/unsafe"
    };
    compilerParameters.ReferencedAssemblies.Add(typeof(BenchmarkRunner).Assembly.Location);
    var compilerResults = cSharpCodeProvider.CompileAssemblyFromSource(compilerParameters, benchmarkContent);
    if (compilerResults.Errors.HasErrors)
    {
        // Print every compile error, then bail out.
        compilerResults.Errors.Cast <CompilerError>().ToList().ForEach(error => Console.WriteLine(error.ErrorText));
        yield break;
    }
    foreach (var type in compilerResults.CompiledAssembly.GetTypes())
    {
        var instance = Activator.CreateInstance(type);
        foreach (var benchmark in CompetitionToBenchmarks(instance, defaultSettings))
        {
            // Re-wrap each benchmark so its target also carries the downloaded
            // source text (used for display/reporting).
            yield return(new Benchmark(new BenchmarkTarget(benchmark.Target.Type, benchmark.Target.Method, benchmark.Target.Description, benchmarkContent), benchmark.Task));
        }
    }
}
/// <summary>
/// Fetches benchmark source from a URL and runs every benchmark it contains.
/// </summary>
public IEnumerable<BenchmarkReport> RunUrl(string url, BenchmarkSettings defaultSettings = null)
{
    // Materialize the lazily-built benchmark list, then delegate to the
    // list-based overload.
    var benchmarks = UrlToBenchmarks(url, defaultSettings).ToList();
    return RunCompetition(benchmarks);
}
/// <summary>
/// Expands a competition object into its individual benchmarks and runs them.
/// </summary>
public IEnumerable<BenchmarkReport> RunCompetition(object benchmarkCompetition, BenchmarkSettings defaultSettings = null)
{
    // Materialize the lazily-built benchmark list, then delegate to the
    // list-based overload.
    var benchmarks = CompetitionToBenchmarks(benchmarkCompetition, defaultSettings).ToList();
    return RunCompetition(benchmarks);
}
public void Setup()
{
    // Load the shared settings asset from a Resources folder.
    benchmarkManager = Resources.Load<BenchmarkSettings>("BenchmarkSettings");

    // Host GameObject whose EmptyBehaviour component can run coroutines.
    var runnerObject = new GameObject("Coroutine Runner");
    behaviour = runnerObject.AddComponent<EmptyBehaviour>();
}
public void ParseAllSettings()
{
    BenchmarkSettings settings = BenchmarkSettings.Parse(
        "/calibration-time:10",
        "/test-time:50");

    // The original test parsed the arguments but asserted nothing, so it could
    // never fail. Verify both recognised switches landed in the settings
    // (seconds, matching the /calibration-time behaviour asserted elsewhere).
    Assert.AreEqual(TimeSpan.FromSeconds(10), settings.CalibrationTime);
    Assert.AreEqual(TimeSpan.FromSeconds(50), settings.TestTime);
}