/// <summary>Reports analyser warning conclusion.</summary>
/// <param name="message">Message text.</param>
/// <param name="hint">Hint how to fix the warning.</param>
/// <param name="report">The report the message belongs to.</param>
public virtual void AddWarningConclusion(
	[NotNull] string message,
	[NotNull] string hint,
	BenchmarkReport report = null) =>
		WriteWarningMessage(message, hint);
/// <summary>Reports test error conclusion.</summary>
/// <param name="target">Target the message applies for.</param>
/// <param name="message">Message text.</param>
/// <param name="report">The report the message belongs to.</param>
public virtual void AddTestErrorConclusion(
	[NotNull] Target target,
	[NotNull] string message,
	BenchmarkReport report = null) =>
		WriteTestErrorMessage(target, message);
/// <summary>Limits for the benchmark.</summary>
/// <param name="baselineReport">The baseline report.</param>
/// <param name="benchmarkReport">The benchmark report.</param>
/// <param name="limitMode">If <c>true</c> limit values should be returned. Actual values returned otherwise.</param>
/// <returns>Limits for the benchmark or <c>null</c> if none.</returns>
protected override CompetitionLimit TryGetCompetitionLimit(
	BenchmarkReport baselineReport,
	BenchmarkReport benchmarkReport,
	bool limitMode)
{
	// Geometric mean of run times: average the logs, then exp back.
	var baselineStat = new Statistics(baselineReport.GetResultRuns().Select(r => Math.Log(r.GetAverageNanoseconds())));
	var benchmarkStat = new Statistics(benchmarkReport.GetResultRuns().Select(r => Math.Log(r.GetAverageNanoseconds())));

	// NOTE: the original computed separate min/max baseline and benchmark values,
	// but all four were Math.Exp(stat.Mean) — identical. Collapsed to one value each.
	var baselineValue = Math.Exp(baselineStat.Mean);
	// Exp() may underflow to exactly 0 for very negative means; a zero baseline
	// would produce an infinite ratio, so report "no limit" instead.
	// ReSharper disable CompareOfFloatsByEqualityOperator
	if (baselineValue == 0)
		// ReSharper restore CompareOfFloatsByEqualityOperator
		return null;

	var benchmarkValue = Math.Exp(benchmarkStat.Mean);
	var ratio = benchmarkValue / baselineValue;

	var minRatio = ratio;
	var maxRatio = ratio;
	if (limitMode)
	{
		// Widen the limit range by 2% in each direction to leave some slack.
		minRatio *= 0.98;
		maxRatio *= 1.02;
	}

	return new CompetitionLimit(Math.Min(minRatio, maxRatio), maxRatio);
}
/// <summary>Warns when the measurement distribution appears to have more than one mode.</summary>
public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report, Summary summary)
{
	// Nothing to analyse without result statistics.
	var stats = report.ResultStatistics;
	if (stats == null)
		yield break;

	// The m-value estimate needs a minimum sample size to be meaningful.
	if (stats.N < EngineResolver.DefaultMinTargetIterationCount)
	{
		yield return CreateHint(
			$"The number of iterations was set to less than {EngineResolver.DefaultMinTargetIterationCount}, " +
			$"{nameof(MultimodalDistributionAnalyzer)} needs at least {EngineResolver.DefaultMinTargetIterationCount} iterations to work.");
	}

	var mValue = MathHelper.CalculateMValue(stats);
	if (mValue > 4.2)
		yield return Create("is multimodal", mValue, report);
	else if (mValue > 3.2)
		yield return Create("is bimodal", mValue, report);
	else if (mValue > 2.8)
		yield return Create("can have several modes", mValue, report);
}
/// <summary>Verifies that the std-dev analyser yields exactly one warning for a widely spread run set.</summary>
public void StdDevTest()
{
	// TODO: write a mock for benchmark reports
	var report = new BenchmarkReport(
		new Benchmark(
			new BenchmarkTarget(null, null),
			new BenchmarkTask(
				1,
				new BenchmarkConfiguration(
					BenchmarkMode.SingleRun,
					BenchmarkPlatform.AnyCpu,
					BenchmarkJitVersion.HostJit,
					BenchmarkFramework.HostFramework,
					BenchmarkToolchain.Classic,
					BenchmarkRuntime.Clr,
					1,
					1))),
		new List<BenchmarkRunReport>
		{
			new BenchmarkRunReport(1, 10),
			new BenchmarkRunReport(1, 50),
			new BenchmarkRunReport(1, 100)
		});
	var reports = new[] { report };

	var warnings = new BenchmarkStdDevAnalyser().Analyze(reports).ToList();

	// xUnit idiom (xUnit2013): Assert.Single instead of Assert.Equal(1, count);
	// it also returns the single item, so the foreach is no longer needed.
	var warning = Assert.Single(warnings);
	output.WriteLine($"[{warning.Kind}] {warning.Message}");
}
/// <summary>Reports test error conclusion.</summary>
/// <param name="message">Message text.</param>
/// <param name="report">The report the message belongs to.</param>
public void AddTestErrorConclusion(string message, BenchmarkReport report = null)
{
	WriteTestErrorMessage(message);

	var conclusion = Conclusion.CreateWarning(Id, message, report);
	ConclusionsList.Add(conclusion);
}
/// <summary>Initializes a conclusion produced by the analyser with the given id.</summary>
/// <param name="analyserId">Id of the analyser that produced the conclusion.</param>
/// <param name="kind">Kind of the conclusion.</param>
/// <param name="message">Message text.</param>
/// <param name="report">The report the conclusion belongs to, if any.</param>
private Conclusion(
	[NotNull] string analyserId,
	ConclusionKind kind,
	[NotNull] string message,
	[CanBeNull] BenchmarkReport report)
{
	Report = report;
	Message = message;
	Kind = kind;
	AnalyserId = analyserId;
}
/// <summary>Limits for the benchmark.</summary>
/// <param name="baselineReport">The baseline report.</param>
/// <param name="benchmarkReport">The benchmark report.</param>
/// <param name="limitMode">If <c>true</c> limit values should be returned. Actual values returned otherwise.</param>
/// <returns>Limits for the benchmark or <c>null</c> if none.</returns>
protected override CompetitionLimit TryGetCompetitionLimit(
	BenchmarkReport baselineReport,
	BenchmarkReport benchmarkReport,
	bool limitMode)
{
	// In limit mode the percentile window is widened by LimitModeDelta,
	// clamped to the valid [0, 99] percentile range.
	var lowPercentile = MinRatioPercentile;
	var highPercentile = MaxRatioPercentile;
	if (limitMode)
	{
		lowPercentile = Math.Max(0, lowPercentile - LimitModeDelta);
		highPercentile = Math.Min(99, highPercentile + LimitModeDelta);
	}

	var baselinePercentiles = baselineReport.ResultStatistics.Percentiles;
	var baselineLow = baselinePercentiles.Percentile(lowPercentile);
	var baselineHigh = baselinePercentiles.Percentile(highPercentile);

	// A zero baseline would produce infinite ratios; report "no limit" instead.
	// ReSharper disable CompareOfFloatsByEqualityOperator
	if (baselineLow == 0 || baselineHigh == 0)
		// ReSharper restore CompareOfFloatsByEqualityOperator
		return null;

	var benchmarkPercentiles = benchmarkReport.ResultStatistics.Percentiles;
	var lowRatio = benchmarkPercentiles.Percentile(lowPercentile) / baselineLow;
	var highRatio = benchmarkPercentiles.Percentile(highPercentile) / baselineHigh;

	return new CompetitionLimit(Math.Min(lowRatio, highRatio), highRatio);
}
/// <summary>Turns every execution error recorded in the report into an error conclusion.</summary>
protected override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report, Summary summary)
{
	// Nested loops instead of SelectMany; iteration order is identical.
	foreach (var executeResult in report.ExecuteResults)
		foreach (string error in executeResult.Errors)
			yield return CreateError(error, report);
}
/// <summary>Warns when the workload duration cannot be distinguished from the empty-method duration.</summary>
protected override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report, Summary summary)
{
	// Fall back to a default CPU resolution when the host frequency is unknown or bogus.
	var frequency = summary.HostEnvironmentInfo.CpuInfo.Value.MaxFrequency;
	if (!frequency.HasValue || frequency <= 0)
		frequency = FallbackCpuResolutionValue.ToFrequency();

	var measurements = report.AllMeasurements;
	var overhead = measurements.Where(m => m.Is(IterationMode.Overhead, IterationStage.Actual)).ToArray();
	var workloads = measurements.Where(m => m.Is(IterationMode.Workload, IterationStage.Actual)).ToArray();
	if (workloads.IsEmpty())
		yield break;

	var workloadStats = workloads.GetStatistics();
	// Half a CPU tick: the threshold below which a single-sample measurement is noise.
	var threshold = frequency.Value.ToResolution().Nanoseconds / 2;

	// Prefer the two-sample test against the measured overhead when it is available.
	bool isZeroMeasurement;
	if (overhead.Any())
		isZeroMeasurement = ZeroMeasurementHelper.CheckZeroMeasurementTwoSamples(workloadStats.WithoutOutliers(), overhead.GetStatistics().WithoutOutliers());
	else
		isZeroMeasurement = ZeroMeasurementHelper.CheckZeroMeasurementOneSample(workloadStats.WithoutOutliers(), threshold);

	if (isZeroMeasurement)
		yield return CreateWarning("The method duration is indistinguishable from the empty method duration", report, false);
}
/// <summary>Warns when the benchmarked assembly was compiled in the DEBUG configuration.</summary>
public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report)
{
	var benchmarkAssembly = report.Benchmark.Target.Type.GetTypeInfo().Assembly;
	if (benchmarkAssembly.IsDebug().IsTrue())
		yield return CreateWarning("Benchmark was built in DEBUG configuration. Please, build it in RELEASE.", report);
}
/// <summary>Reports outliers found in the main-target measurements and sanity-checks the outlier bookkeeping.</summary>
public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report)
{
	var actual = report.AllMeasurements.Where(m => m.IterationMode == IterationMode.MainTarget).ToArray();
	var result = report.AllMeasurements.Where(m => m.IterationMode == IterationMode.Result).ToArray();
	var actualOutliers = actual.GetStatistics().Outliers;
	bool removeOutliers = report.Benchmark.Job.ResolveValue(AccuracyMode.RemoveOutliersCharacteristic, EngineResolver.Instance); // TODO: improve

	// Invariant: result measurements + removed outliers must account for all main-target measurements.
	if (result.Length + (actualOutliers.Length * (removeOutliers ? 1 : 0)) != actual.Length)
	{
		// This should never happen
		yield return CreateHint(
			string.Format(
				"Something went wrong with outliers: Size(MainTarget) = {0}, Size(MainTarget/Outliers) = {1}, Size(Result) = {2}), RemoveOutliers = {3}",
				actual.Length, actualOutliers.Length, result.Length, removeOutliers),
			report);
		yield break;
	}

	if (actualOutliers.Any())
	{
		int n = actualOutliers.Length;
		// BUGFIX: "outlier was " had a trailing space, producing a double space
		// in the interpolated message below ("1 outlier was  removed").
		string words = n == 1 ? "outlier was" : "outliers were";
		string verb = removeOutliers ? "removed" : "detected";
		yield return CreateHint($"{n} {words} {verb}", report);
	}
}
// No need to run the benchmarks again — disassembly happens after all runs complete.
public void ProcessResults(Benchmark benchmark, BenchmarkReport report)
{
	// Guard clause instead of wrapping the whole body in the condition.
	if (!ShouldUseMonoDisassembler(benchmark))
		return;

	var monoRuntime = benchmark.Job.Env.Runtime as MonoRuntime;
	results.Add(benchmark, monoDisassembler.Disassemble(benchmark, monoRuntime));
}
/// <summary>Reports outliers found in the main-target measurements and sanity-checks the outlier bookkeeping.</summary>
public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report, Summary summary)
{
	var mainMeasurements = report.AllMeasurements.Where(m => m.IterationMode == IterationMode.MainTarget).ToArray();
	if (mainMeasurements.IsEmpty())
		yield break;

	var resultMeasurements = report.AllMeasurements.Where(m => m.IterationMode == IterationMode.Result).ToArray();
	var outlierMode = report.Benchmark.Job.ResolveValue(AccuracyMode.OutlierModeCharacteristic, EngineResolver.Instance); // TODO: improve

	var statistics = mainMeasurements.GetStatistics();
	var allOutliers = statistics.AllOutliers;
	var actualOutliers = statistics.GetActualOutliers(outlierMode);

	// Invariant: result measurements + actual outliers must account for all main-target measurements.
	if (resultMeasurements.Length + actualOutliers.Length != mainMeasurements.Length)
	{
		// This should never happen
		yield return CreateHint(
			$"Something went wrong with outliers: " +
			$"Size(MainTarget) = {mainMeasurements.Length}, " +
			$"Size(MainTarget/Outliers) = {actualOutliers.Length}, " +
			$"Size(Result) = {resultMeasurements.Length}), " +
			$"OutlierMode = {outlierMode}",
			report);
		yield break;
	}

	if (allOutliers.Any())
		yield return CreateHint(GetMessage(actualOutliers.Length, allOutliers.Length), report);
}
/// <summary>Runs a single benchmark through the Generate → Build → Execute pipeline.</summary>
/// <returns>The benchmark report; an empty report when generation or build fails.</returns>
private BenchmarkReport Run(IBenchmarkLogger logger, Benchmark benchmark, IList<string> importantPropertyNames, BenchmarkParameters parameters = null)
{
	var toolchain = Plugins.CreateToolchain(benchmark, logger);

	logger.WriteLineHeader("// **************************");
	logger.WriteLineHeader("// Benchmark: " + benchmark.Description);

	// Bail out with an empty report at the first failed stage.
	var generateResult = Generate(logger, toolchain);
	if (!generateResult.IsGenerateSuccess)
		return BenchmarkReport.CreateEmpty(benchmark, parameters);

	var buildResult = Build(logger, toolchain, generateResult);
	if (!buildResult.IsBuildSuccess)
		return BenchmarkReport.CreateEmpty(benchmark, parameters);

	var runReports = Execute(logger, benchmark, importantPropertyNames, parameters, toolchain, buildResult);
	return new BenchmarkReport(benchmark, runReports, parameters);
}
/// <summary>Reports test error conclusion.</summary>
/// <param name="message">Message text.</param>
/// <param name="report">The report the message belongs to.</param>
public override void AddTestErrorConclusion(string message, BenchmarkReport report = null)
{
	base.AddTestErrorConclusion(message, report);

	var conclusion = Conclusion.CreateWarning(Id, message, report);
	ConclusionsList.Add(conclusion);
}
/// <summary>Notifies every attached diagnoser that the benchmark run has stopped.</summary>
public void Stop(Benchmark benchmark, BenchmarkReport report)
{
	foreach (var diagnoser in diagnosers)
		diagnoser.Stop(benchmark, report);
}
/// <summary>Warns when the benchmarked assembly was built with JIT optimizations disabled.</summary>
public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report, Summary summary)
{
	var benchmarkAssembly = report.Benchmark.Target.Type.GetTypeInfo().Assembly;
	if (benchmarkAssembly.IsJitOptimizationDisabled().IsTrue())
		yield return CreateWarning("Benchmark was built without optimization enabled (most probably a DEBUG configuration). Please, build it in RELEASE.", report);
}
/// <summary>Gets report for the benchmark.</summary>
/// <param name="benchmark">The benchmark.</param>
/// <param name="summary">Summary for the run.</param>
/// <param name="benchmarkReport">The benchmark report.</param>
/// <returns><c>true</c> if benchmark report is available.</returns>
protected static bool TryGetReport(
	[NotNull] Benchmark benchmark,
	[NotNull] Summary summary,
	out BenchmarkReport benchmarkReport)
{
	benchmarkReport = summary[benchmark];

	// The report counts as available only when it carries result statistics.
	return benchmarkReport?.ResultStatistics != null;
}
/// <summary>Gets reports for the benchmark and the baseline.</summary>
/// <param name="benchmark">The benchmark.</param>
/// <param name="summary">Summary for the run.</param>
/// <param name="benchmarkReport">The benchmark report.</param>
/// <param name="baselineReport">The baseline report.</param>
/// <returns><c>true</c> if both benchmark and baseline reports are available.</returns>
protected static bool TryGetReports(
	[NotNull] Benchmark benchmark,
	[NotNull] Summary summary,
	out BenchmarkReport benchmarkReport,
	out BenchmarkReport baselineReport)
{
	benchmarkReport = null;
	baselineReport = null;

	// No baseline benchmark — nothing to compare against.
	var baselineBenchmark = summary.TryGetBaseline(benchmark);
	if (baselineBenchmark == null)
		return false;

	benchmarkReport = summary[benchmark];
	if (benchmarkReport?.ResultStatistics == null)
		return false;

	// Both reports must carry result statistics to be usable.
	baselineReport = summary[baselineBenchmark];
	return baselineReport?.ResultStatistics != null;
}
/// <summary>Ensures the report exists and carries result statistics.</summary>
/// <param name="magicReport">The report to validate; may be <c>null</c>.</param>
/// <exception cref="InvalidOperationException">The report is <c>null</c> or has no result statistics.</exception>
private static void CheckSummary(BenchmarkReport magicReport)
{
	if (magicReport?.ResultStatistics == null)
		// The original threw a message-less exception, which is undiagnosable in logs.
		throw new InvalidOperationException("The benchmark report is missing or has no result statistics.");
}
/// <summary>Reports outliers found in the actual workload measurements and sanity-checks the outlier bookkeeping.</summary>
protected override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report, Summary summary)
{
	var actualMeasurements = report.AllMeasurements.Where(m => m.Is(IterationMode.Workload, IterationStage.Actual)).ToArray();
	if (actualMeasurements.IsEmpty())
		yield break;

	var resultMeasurements = report.AllMeasurements.Where(m => m.Is(IterationMode.Workload, IterationStage.Result)).ToArray();
	var outlierMode = report.BenchmarkCase.Job.ResolveValue(AccuracyMode.OutlierModeCharacteristic, EngineResolver.Instance); // TODO: improve

	var statistics = actualMeasurements.GetStatistics();
	var allOutliers = statistics.AllOutliers;
	var actualOutliers = statistics.GetActualOutliers(outlierMode);

	// Invariant: result measurements + actual outliers must account for all workload measurements.
	if (resultMeasurements.Length + actualOutliers.Length != actualMeasurements.Length)
	{
		// This should never happen
		yield return CreateHint(
			"Something went wrong with outliers: " +
			$"Size(WorkloadActual) = {actualMeasurements.Length}, " +
			$"Size(WorkloadActual/Outliers) = {actualOutliers.Length}, " +
			$"Size(Result) = {resultMeasurements.Length}), " +
			$"OutlierMode = {outlierMode}",
			report);
		yield break;
	}

	var cultureInfo = summary.GetCultureInfo();
	if (allOutliers.Any())
		yield return CreateHint(GetMessage(actualOutliers, allOutliers, statistics.LowerOutliers, statistics.UpperOutliers, cultureInfo), report);
}
/// <summary>Reports analyser warning conclusion.</summary>
/// <param name="message">Message text.</param>
/// <param name="hint">Hint how to fix the warning.</param>
/// <param name="report">The report the message belongs to.</param>
public void AddWarningConclusion(string message, string hint, BenchmarkReport report = null)
{
	WriteWarningMessage(message, hint);

	var conclusion = Conclusion.CreateWarning(Id, message, report);
	ConclusionsList.Add(conclusion);
}
/// <summary>Attaches the per-process stats to the benchmark, filling in the total operation count.</summary>
public void ProcessResults(Benchmark benchmark, BenchmarkReport report)
{
	var processId = BenchmarkToProcess[benchmark];
	var stats = StatsPerProcess[processId];

	// Idle (overhead) iterations do not count towards the total operations.
	stats.TotalOperations = report.AllMeasurements
		.Where(m => !m.IterationMode.IsIdle())
		.Sum(m => m.Operations);

	results.Add(benchmark, stats);
}
/// <summary>Gets the values from benchmark report.</summary>
/// <param name="benchmarkReport">The benchmark report.</param>
/// <param name="summary">The summary.</param>
/// <returns>Metric values from benchmark report</returns>
protected override double[] GetValuesFromReport(BenchmarkReport benchmarkReport, Summary summary)
{
	var totalOps = benchmarkReport.GcStats.TotalOperations;
	return _results[summary].TryGetValue(benchmarkReport.Benchmark, out var result)
		// Per-operation value: raw metric divided by the number of operations.
		? new[] { (double)result / totalOps }
		// CA1825: reuse the cached empty array instead of allocating `new double[0]` on every miss.
		: System.Array.Empty<double>();
}
/// <summary>Reports test error conclusion.</summary>
/// <param name="target">Target the message applies for.</param>
/// <param name="message">Message text.</param>
/// <param name="report">The report the message belongs to.</param>
public void AddTestErrorConclusion(Target target, string message, BenchmarkReport report = null)
{
	WriteTestErrorMessage(target, message);

	var conclusion = Conclusion.CreateWarning(Id, FormatMessage(target, message), report);
	ConclusionsList.Add(conclusion);
}
// Returns a short display name for the benchmark the report belongs to.
static string Info(BenchmarkReport report)
{
#if NET40
	// Legacy BenchmarkDotNet API: the target method title serves as the display name.
	return(report.Benchmark.Target.MethodTitle);
#else
	// Strips nameof(RUtils_Benchmarks).Length + 6 leading characters from DisplayInfo,
	// then trims surrounding spaces and single quotes.
	// NOTE(review): the "+ 6" magic offset presumably skips a fixed separator in
	// DisplayInfo's format — confirm against the actual DisplayInfo layout.
	return(report.BenchmarkCase.Descriptor.DisplayInfo.Substring(nameof(RUtils_Benchmarks).Length + 6).Trim(' ', '\''));
#endif
}
/// <summary>
/// Prints a benchmark result.
/// </summary>
/// <param name="report">A report of a succeeded BenchmarkReport.</param>
internal static void PrintResult(BenchmarkReport report)
{
	// Method name with single quotes stripped, then a tab before the numbers.
	PrintInfo(report.BenchmarkCase.Descriptor.WorkloadMethodDisplayInfo.Replace("'", ""));
	Console.Write("\t");
	Console.ForegroundColor = ConsoleColor.DarkYellow;
	// Statistics are in nanoseconds; dividing by 1e6 converts to milliseconds.
	// NOTE(review): ResultStatistics can be null for a failed run — confirm callers
	// only pass succeeded reports, as the XML doc implies.
	Console.Write($"Avg: {(int)(report.ResultStatistics.Mean / 1000000)} ms | Med: {(int)(report.ResultStatistics.Median / 1000000)} ms | Dev: {(report.ResultStatistics.StandardDeviation / 1000000):N2} ms");
	// NOTE(review): assumes Gray is the console's normal foreground instead of
	// saving and restoring the previous color.
	Console.ForegroundColor = ConsoleColor.Gray;
	Console.WriteLine();
}
/// <summary>Warns when the smallest observed iteration time is below the sufficient minimum.</summary>
public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report, Summary summary)
{
	var generalWorkload = report.AllMeasurements.Where(m => m.Is(IterationMode.Workload, IterationStage.General)).ToArray();
	if (generalWorkload.IsEmpty())
		yield break;

	var minIterationTime = TimeInterval.FromNanoseconds(generalWorkload.Min(m => m.Nanoseconds));
	if (minIterationTime < MinSufficientIterationTime)
		yield return CreateWarning($"MinIterationTime = {minIterationTime} which is very small. It's recommended to increase it.", report);
}
/// <summary>Reports analyser warning conclusion.</summary>
/// <param name="target">Target the message applies for.</param>
/// <param name="message">Message text.</param>
/// <param name="hint">Hint how to fix the warning.</param>
/// <param name="report">The report the message belongs to.</param>
public void AddWarningConclusion(Target target, string message, string hint, BenchmarkReport report = null)
{
	WriteWarningMessage(target, message, hint);

	var conclusion = Conclusion.CreateWarning(Id, FormatMessage(target, message), report);
	ConclusionsList.Add(conclusion);
}