Example #1
        private static void CheckMembers(ResultAnalysis analysis)
        {
            var summary = analysis.Summary;

            // No duplicate names
            var targets          = summary.GetBenchmarkTargets();
            var duplicateTargets = GetDuplicates(
                targets,
                t => t.Method.Name,
                t => $"\r\n\t\t  {t.Method.DeclaringType}.{t.Method.Name}()");

            if (duplicateTargets.NotNullNorEmpty())
            {
                analysis.WriteSetupErrorMessage(
                    $"There are multiple methods with same name: {duplicateTargets.Join(",\r\n\t\t")}.",
                    "Rename methods to avoid duplicates.");
            }

            // No conflict on attributes
            var targetMethodsWithAttributes = targets
                .SelectMany(GetInvokedMethods)
                .Distinct()
                .SelectMany(
                    m => m.GetCustomAttributes(true)
                        .Select(a => (
                            method: m,
                            attributeType: a.GetType(),
                            baseAttribute: _knownUniqueMemberLevelAttributes.FirstOrDefault(ka => ka.IsInstanceOfType(a)),
                            target: (a as TargetedAttribute)?.Target))
                        .Where(t => t.baseAttribute != null))
                .ToArray();

            var conflictingAttributes = GetDuplicates(
                targetMethodsWithAttributes,
                t => $"{t.method.DeclaringType}.{t.method.Name}({t.target})",
                t => $"\r\n\t\t  {t.attributeType.FullName}");

            if (conflictingAttributes.NotNullNorEmpty())
            {
                analysis.WriteSetupErrorMessage(
                    $"There are conflicting attributes: {conflictingAttributes.Join(",\r\n\t\t")}.",
                    "There can be only one.");
            }

            // No multiple methods for an attribute
            var conflictingMethods = GetDuplicates(
                targetMethodsWithAttributes.Where(t => _knownUniqueTypeLevelAttributes.Contains(t.baseAttribute)),
                t => $"{t.baseAttribute.FullName}({t.target})",
                t => $"\r\n\t\t  {t.method.DeclaringType}.{t.method.Name}() ({t.attributeType.FullName})");

            if (conflictingMethods.NotNullNorEmpty())
            {
                analysis.WriteSetupErrorMessage(
                    $"There are conflicting methods: {conflictingMethods.Join(",\r\n\t\t")}.",
                    "Leave only one method for each attribute.");
            }
        }
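
For context: GetDuplicates is a private helper that is not part of this snippet. A minimal sketch of what it could look like, inferred purely from the three call sites above (the name, signature, and the way each group is rendered are assumptions, not the library's actual code):

        // Sketch of the GetDuplicates helper, inferred from the call sites above.
        // Groups items by key, keeps only keys that occur more than once, and
        // renders each group as "<key>:" followed by the formatted duplicates.
        private static string[] GetDuplicates<T>(
            IEnumerable<T> source,
            Func<T, string> keySelector,
            Func<T, string> duplicateFormatter) =>
            source
                .GroupBy(keySelector)
                .Where(group => group.Skip(1).Any()) // more than one item => duplicate key
                .Select(group => group.Key + ":" + string.Join("", group.Select(duplicateFormatter)))
                .ToArray();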
Example #2
        private static string[] GetTargetNames(
            ResultAnalysis analysis,
            Func<BenchmarkReport, bool> benchmarkReportFilter) =>
            analysis.Summary.GetSummaryOrderBenchmarks()
                .Select(b => analysis.Summary[b])
                .Where(r => r != null && r.ExecuteResults.Any() && benchmarkReportFilter(r))
                .Select(r => r.Benchmark.Target.MethodDisplayInfo)
                .Distinct()
                .ToArray();
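
A typical call, mirroring how Example #8 below uses this helper (the one-millisecond threshold here is made up for illustration):

        // Hypothetical usage: display names of benchmarks whose mean run time
        // exceeds 1 ms (1_000_000 ns).
        var slowTargets = GetTargetNames(
            analysis,
            r => r.GetResultRuns().Average(run => run.Nanoseconds) > 1_000_000);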
Example #3
        private static bool CheckBenchmark(
            Benchmark benchmark,
            CompetitionMetricValue metricValue,
            ResultAnalysis analysis)
        {
            var summary = analysis.Summary;
            var metric  = metricValue.Metric;

            var actualValues = metric.ValuesProvider.TryGetActualValues(benchmark, summary);

            if (actualValues.IsEmpty)
            {
                analysis.AddTestErrorConclusion(
                    benchmark.Target,
                    $"Could not obtain {metric} metric values for {benchmark.DisplayInfo}.",
                    summary[benchmark]);

                return true;
            }

            if (metricValue.ValuesRange.ContainsWithRounding(actualValues, metricValue.DisplayMetricUnit))
            {
                return true;
            }

            bool checkPassed;

            if (metricValue.ValuesRange.IsEmpty)
            {
                // Check passed if empty & adjustment is disabled.
                checkPassed = !analysis.Options.Adjustments.AdjustMetrics && !analysis.Options.Adjustments.ForceEmptyMetricsAdjustment;
            }
            else
            {
                analysis.AddTestErrorConclusion(
                    benchmark.Target,
                    $"Metric {metric} {actualValues.ToString(metric.MetricUnits)} is out of limit {metricValue}.",
                    summary[benchmark]);

                checkPassed = false;
            }

            if (PerformAdjustment(analysis, metricValue))
            {
                var limitValues = metric.ValuesProvider.TryGetLimitValues(benchmark, analysis.Summary);
                metricValue.UnionWith(
                    new CompetitionMetricValue(
                        metric,
                        limitValues,
                        metric.MetricUnits[limitValues]),
                    false);
            }

            return checkPassed;
        }
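
PerformAdjustment is not shown in this snippet. Judging only from the adjustment flags consulted above, a plausible sketch could be (an inference, not the library's actual logic):

        // Hypothetical sketch of PerformAdjustment, inferred from the flags used
        // in CheckBenchmark; the real implementation may consult more options.
        private static bool PerformAdjustment(ResultAnalysis analysis, CompetitionMetricValue metricValue) =>
            analysis.Options.Adjustments.AdjustMetrics ||
            (metricValue.ValuesRange.IsEmpty && analysis.Options.Adjustments.ForceEmptyMetricsAdjustment);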
Example #4
        private static bool OnCheckTarget(
            [NotNull] CompetitionTarget competitionTarget,
            [NotNull] Benchmark[] benchmarksForTarget,
            [NotNull] ResultAnalysis analysis)
        {
            var result = true;

            foreach (var metricValue in competitionTarget.MetricValues)
            {
                foreach (var benchmark in benchmarksForTarget)
                {
                    result &= CheckBenchmark(benchmark, metricValue, analysis);
                }
            }
            return result;
        }
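
One design point worth noting: the &= operator does not short-circuit, so every benchmark is checked against every metric and all failures are reported in a single run:

        // Non-short-circuiting: CheckBenchmark always runs; the verdict accumulates.
        result &= CheckBenchmark(benchmark, metricValue, analysis);

        // A short-circuiting variant (NOT what the code above does) would skip
        // the remaining checks after the first failure:
        // result = result && CheckBenchmark(benchmark, metricValue, analysis);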
Example #5
        /// <summary>Checks preconditions for competition analysis.</summary>
        /// <param name="summary">Summary for the run.</param>
        /// <returns>Enumerable with warnings for the benchmarks.</returns>
        public IEnumerable <Conclusion> Analyse([NotNull] Summary summary)
        {
            Code.NotNull(summary, nameof(summary));

            var analysis       = new ResultAnalysis(Id, summary);
            var checkExecution = true;

            if (analysis.RunState.IsFirstRun)
            {
                checkExecution = CheckSetup(analysis);
            }

            if (checkExecution)
            {
                CheckExecution(analysis);
            }

            return analysis.Conclusions.ToArray();
        }
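
Analysers like this one are normally invoked by BenchmarkDotNet itself after a run completes. A hypothetical manual invocation (for example, in a test) could look like:

        // Hypothetical manual call; 'analyser' and 'summary' are placeholders.
        foreach (var conclusion in analyser.Analyse(summary))
            Console.WriteLine($"{conclusion.Kind}: {conclusion.Message}");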
Example #6
        private static bool CheckSetup(ResultAnalysis analysis)
        {
            var summary = analysis.Summary;

            // DONTTOUCH: DO NOT add return into if clauses.
            // All conditions should be checked
            if (summary.HasCriticalValidationErrors)
            {
                analysis.WriteExecutionErrorMessage("Summary has validation errors.");
            }

            if (!summary.HasCriticalValidationErrors && summary.Benchmarks.IsNullOrEmpty())
            {
                analysis.WriteSetupErrorMessage(
                    "Nothing to check as there is no methods in benchmark.",
                    $"Apply one of {nameof(CompetitionBenchmarkAttribute)}, {nameof(CompetitionBaselineAttribute)} or {nameof(BenchmarkAttribute)} to the benchmark methods.");
            }

            if (summary.Config.GetJobs().Skip(1).Any())
            {
                analysis.WriteSetupErrorMessage(
                    "Benchmark configuration includes multiple jobs. " +
                    "This is not supported as there's no way to store metric annotations individually per each job.",
                    "Ensure that the config contains only one job.");
            }

            if (summary.Benchmarks.Select(b => b.Parameters).Distinct().Skip(1).Any())
            {
                analysis.WriteInfoMessage(
                    "Benchmark configuration includes multiple parameters. " +
                    "Note that results for each parameter set will be merged.");
            }

            CheckMembers(analysis);

            CheckMetrics(analysis);

            return analysis.SafeToContinue;
        }
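
Aside: the Skip(1).Any() idiom used twice above is a cheap way to test for "more than one element" without counting the whole sequence:

        // Equivalent to Count() > 1, but stops enumerating after the second element.
        var hasMultipleJobs = summary.Config.GetJobs().Skip(1).Any();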
Example #7
        /// <summary>Dumps validation messages.</summary>
        /// <param name="summary">Summary for the run.</param>
        /// <returns>Enumerable with warnings for the benchmarks.</returns>
        public IEnumerable <Conclusion> Analyse([NotNull] Summary summary)
        {
            var analysis = new ResultAnalysis(Id, summary, MessageSource.Validator);

            foreach (var validationError in summary.ValidationErrors)
            {
                var message = validationError.Benchmark == null
                    ? validationError.Message
                    : $"Benchmark {validationError.Benchmark.DisplayInfo}:{Environment.NewLine}\t{validationError.Message}";

                if (validationError.IsCritical)
                {
                    analysis.WriteSetupErrorMessage(message);
                }
                else
                {
                    analysis.WriteWarningMessage(message);
                }
            }

            return analysis.Conclusions;
        }
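
For reference, the ValidationError consumed above carries a criticality flag, a message, and an optional benchmark. A hypothetical error as a validator might emit it (the constructor shape is an assumption based on the properties read above):

        // Hypothetical validation error; critical errors become setup errors,
        // non-critical ones become warnings. 'someBenchmark' is a placeholder.
        var error = new ValidationError(
            isCritical: false,
            message: "Benchmark method should be public.",
            benchmark: someBenchmark);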
Example #8
        private static void CheckExecution(ResultAnalysis analysis)
        {
            var summary = analysis.Summary;

            // DONTTOUCH: DO NOT add return into if clauses.
            // All conditions should be checked

            var benchmarksWithReports = summary.Reports
                                        .Where(r => r.ExecuteResults.Any())
                                        .Select(r => r.Benchmark);

            var benchMissing = summary.GetSummaryOrderBenchmarks()
                               .Except(benchmarksWithReports)
                               .Select(b => b.Target.MethodDisplayInfo)
                               .Distinct()
                               .ToArray();

            if (benchMissing.Any())
            {
                var benchmarks = benchMissing.Length == 1 ? "benchmark" : "benchmarks";
                analysis.WriteExecutionErrorMessage(
                    $"No result reports for {benchmarks}: {benchMissing.Join(", ")}.",
                    "Ensure that benchmarks were run successfully and did not throw any exceptions.");
            }

            var checksMode = analysis.Options.Checks;

            if (checksMode.CheckMetrics)
            {
                var timeUnits = MetricUnitScale.FromEnumValues(typeof(TimeUnit));

                if (checksMode.TooFastBenchmarkLimit > TimeSpan.Zero)
                {
                    var tooFastReports = GetTargetNames(
                        analysis,
                        r => r.GetResultRuns().Average(run => run.Nanoseconds) < checksMode.TooFastBenchmarkLimit.TotalNanoseconds());

                    if (tooFastReports.Any())
                    {
                        var benchmarks = tooFastReports.Length == 1 ? "Benchmark" : "Benchmarks";
                        var time       = checksMode.TooFastBenchmarkLimit
                                         .TotalNanoseconds()
                                         .ToString(timeUnits);
                        analysis.AddWarningConclusion(
                            $"{benchmarks} {tooFastReports.Join(", ")}: measured run time is less than {time}. " +
                            "Timings are imprecise as they are too close to the timer resolution.",
                            $"Timing limit for this warning is configured via {CompetitionCheckMode.TooFastBenchmarkLimitCharacteristic.FullId}.");
                    }
                }

                if (checksMode.LongRunningBenchmarkLimit > TimeSpan.Zero)
                {
                    var tooSlowReports = GetTargetNames(
                        analysis,
                        r => r.GetResultRuns().Average(run => run.Nanoseconds) > checksMode.LongRunningBenchmarkLimit.TotalNanoseconds());

                    if (tooSlowReports.Any())
                    {
                        var benchmarks = tooSlowReports.Length == 1 ? "Benchmark" : "Benchmarks";
                        var time       = checksMode.LongRunningBenchmarkLimit
                                         .TotalNanoseconds()
                                         .ToString(timeUnits);
                        analysis.AddWarningConclusion(
                            $"{benchmarks} {string.Join(", ", tooSlowReports)}: measured run time is greater than {time}. " +
                            "There's a risk the peak timings were hidden by averages. " +
                            "Consider to reduce the number of iterations performed per each measurement.",
                            $"Timing limit for this warning is configured via {CompetitionCheckMode.LongRunningBenchmarkLimitCharacteristic.FullId}.");
                    }
                }
            }
        }
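
The TotalNanoseconds() extension used above is a library helper not shown here; the conversion it must perform is a fixed rescaling of TimeSpan ticks (a sketch, assuming no extra rounding):

        // Sketch of TotalNanoseconds(): a .NET tick is 100 ns, so the conversion
        // is TimeSpan.Ticks scaled by 1e9 / TicksPerSecond.
        public static double TotalNanoseconds(this TimeSpan timeSpan) =>
            timeSpan.Ticks * (1_000_000_000.0 / TimeSpan.TicksPerSecond);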