/// <summary>
///  Benchmark a single named method: measure it, append a result row to the output
///  table (name, current time, then a time + ratio pair for each loaded comparison),
///  and record a failure when the baseline comparison cell comes back red.
/// </summary>
/// <param name="methodName">Display name of the method being benchmarked; also the lookup key into loaded comparison results</param>
/// <param name="method">Delegate to measure</param>
/// <returns>Measurement result for the method</returns>
public MeasureResult Run(string methodName, Action method)
{
    List<TableCell> row = new List<TableCell>();

    // Benchmark this method
    MeasureResult result = Measure.Operation(method, _settings);

    // Report current time
    row.Add(TableCell.String(methodName));
    row.Add(TableCell.Time(result.SecondsPerIteration));

    // Compare to each loaded benchmark
    foreach (var comparison in _comparisons)
    {
        bool isBaseline = comparison.Key == BaselineColumnName;

        // NOTE(review): a method missing from a comparison set falls back to 0.0 seconds,
        // which makes the ratio against the current time extreme — confirm intended.
        double comparisonTime;
        if (!comparison.Value.TryGetValue(methodName, out comparisonTime)) { comparisonTime = 0.0; }

        row.Add(TableCell.Time(comparisonTime));
        row.Add(BenchmarkRatio(comparisonTime, result.SecondsPerIteration, isBaseline));

        // A red ratio cell on the baseline column counts as a benchmark failure.
        if (isBaseline && row.Last().Color == TableColor.Red) { HasFailures = true; }
    }

    _table.AppendRow(row);
    return (result);
}
/// <summary>
///  Measure an operation: run it in timed passes until MinIterations passes have
///  completed, or until MaxIterations / WithinTime is exhausted. The inner loop count
///  doubles (up to 32M) until a single pass takes at least 100 ms, so that very fast
///  operations are timed over a measurable interval. Time from the first measured
///  iteration is excluded (it is often much slower, e.g. due to warmup).
/// </summary>
/// <param name="operation">Operation to measure</param>
/// <param name="settings">Measurement settings, or null for defaults</param>
/// <returns>MeasureResult with iteration count, elapsed time, and (optionally) added memory</returns>
public static MeasureResult Operation(Action operation, MeasureSettings settings = null)
{
    settings = settings ?? MeasureSettings.Default;

    // Snapshot memory (after a full collect) only when memory measurement is requested.
    long ramBefore = (settings.MeasureMemory ? GC.GetTotalMemory(true) : 0);

    TimeSpan elapsedAfterFirst = TimeSpan.Zero;
    Stopwatch total = Stopwatch.StartNew();
    Stopwatch single = Stopwatch.StartNew();

    int pass = 0;
    int iterations = 0;
    int innerIterations = 1;

    while (pass < settings.MinIterations || (pass < settings.MaxIterations && total.Elapsed < settings.WithinTime))
    {
        // Measure the operation in a tight loop
        single.Restart();

        for (int i = 0; i < innerIterations; ++i)
        {
            operation();
        }

        single.Stop();

        // Double inner loop iterations and don't count pass until we spend enough time to measure reliably in a pass
        // NOTE(review): innerIterations is doubled BEFORE being added to iterations below,
        // so a fast pass credits twice the executions actually run this pass — confirm intended.
        if (single.ElapsedMilliseconds < 100 && innerIterations < 32 * 1024 * 1024)
        {
            innerIterations *= 2;
            pass--;
        }

        // Track time from all iterations but the first (which is often much slower)
        if (iterations > 0)
        {
            elapsedAfterFirst += single.Elapsed;
        }

        iterations += innerIterations;
        pass++;
    }

    MeasureResult result = new MeasureResult()
    {
        // Exclude the (untimed) first iteration from the count, unless it was the only one.
        Iterations = (iterations == 1 ? 1 : iterations - 1),
        Elapsed = (iterations == 1 ? single.Elapsed : elapsedAfterFirst),
        AddedMemoryBytes = (settings.MeasureMemory ? GC.GetTotalMemory(true) - ramBefore : 0),
    };

    return (result);
}
/// <summary>
///  Measure a value-returning operation. All timing and memory accounting is
///  delegated to the Action-based overload; the last value the operation produced
///  is carried through on the typed result.
/// </summary>
/// <typeparam name="T">Return type of the measured operation</typeparam>
/// <param name="operation">Operation to measure</param>
/// <param name="settings">Measurement settings, or null for defaults</param>
/// <returns>MeasureResult&lt;T&gt; with measurements and the operation's last output</returns>
public static MeasureResult<T> Operation<T>(Func<T> operation, MeasureSettings settings = null)
{
    T lastOutput = default(T);

    // Wrap in an Action so the non-generic overload does all of the measuring;
    // each invocation overwrites lastOutput, so the final value wins.
    MeasureResult measured = Operation(() => { lastOutput = operation(); }, settings);

    MeasureResult<T> typedResult = new MeasureResult<T>()
    {
        Iterations = measured.Iterations,
        Elapsed = measured.Elapsed,
        AddedMemoryBytes = measured.AddedMemoryBytes,
        Output = lastOutput
    };

    return typedResult;
}
/// <summary>
///  Run the calibration method and compute the ratio of this machine's speed to the
///  baseline's, so later baseline comparisons can be scaled to be machine-fair.
/// </summary>
internal void Calibrate()
{
    MeasureResult calibrationResult = Run(CalibrationMethodName, CalibrationFunction);

    // Ensure a "failure" in calibration isn't counted
    HasFailures = false;

    // Save calibration ratio
    Dictionary<string, double> baseline = _comparisons.Values.FirstOrDefault();

    // Guard must exclude zero (not just negatives): a 0.0 baseline time would make the
    // division below Infinity/NaN and poison every scaled comparison afterward.
    if (baseline != null && baseline.TryGetValue(CalibrationMethodName, out double baselineSeconds) && baselineSeconds > 0.0)
    {
        // If current was 2x baseline, must multiply other baselines by 2x to get a scaled value to compare to.
        _baselineAdjustment = (calibrationResult.SecondsPerIteration / baselineSeconds);
    }
}
/// <summary>
///  For each file in inputPath, load into an ArgumentClass with loader(),
///  then run each [Benchmark] method on operationsClass and report the results.
/// </summary>
/// <typeparam name="ArgumentClass">Object Model type each file turns into.</typeparam>
/// <param name="operationsClass">Class containing [Benchmark] methods which take the ArgumentClass type</param>
/// <param name="inputPath">Folder Path or single File Path which can be loaded into ArgumentClass</param>
/// <param name="loader">Method which takes a file path and loads into an ArgumentClass instance</param>
public static void RunFiles<ArgumentClass>(Type operationsClass, string inputPath, Func<string, ArgumentClass> loader)
{
    // Find all [Benchmark] methods which take an ArgumentClass.
    Dictionary<string, Action<ArgumentClass>> benchmarkMethods = BenchmarkReflector.BenchmarkMethods<Action<ArgumentClass>>(operationsClass);

    // Fixed columns first, then one column per benchmark method.
    List<TableCell> columns = new List<TableCell>()
    {
        new TableCell("File"),
        new TableCell("Size", Align.Right),
        new TableCell("Load", Align.Right),
        new TableCell("RAM", Align.Right, TableColor.Green)
    };

    foreach (string key in benchmarkMethods.Keys)
    {
        columns.Add(new TableCell(key, Align.Right, TableColor.Green));
    }

    ConsoleTable table = new ConsoleTable(columns.ToArray());

    foreach (string filePath in FilesForPath(inputPath))
    {
        long fileLengthBytes = new FileInfo(filePath).Length;
        ArgumentClass instance = default(ArgumentClass);
        List<TableCell> row = new List<TableCell>();

        // Use the loader to load the file; log name, size, load rate.
        MeasureResult load = Measure.Operation(() => instance = loader(filePath), MeasureSettings.Load);
        row.Add(TableCell.String(Path.GetFileName(filePath)));
        row.Add(TableCell.Size(fileLengthBytes));
        row.Add(TableCell.Rate(fileLengthBytes, load.SecondsPerIteration));
        row.Add(TableCell.Size(load.AddedMemoryBytes));

        // Log action time per operation. Enumerate key/value pairs (same order as the
        // Keys enumeration used for the columns) to avoid a second lookup per method.
        foreach (KeyValuePair<string, Action<ArgumentClass>> benchmark in benchmarkMethods)
        {
            Action<ArgumentClass> operation = benchmark.Value;
            MeasureResult opResult = Measure.Operation(() => operation(instance));
            row.Add(TableCell.Time(opResult.SecondsPerIteration));
        }

        table.AppendRow(row);
    }
}