private static bool CheckSetup(SummaryAnalysis analysis)
        {
            var summary = analysis.Summary;

            // DONTTOUCH: DO NOT add returns into the if clauses.
            // All conditions should be checked.
            if (summary.HasCriticalValidationErrors)
            {
                analysis.WriteExecutionErrorMessage("Summary has validation errors.");
            }

            if (!summary.HasCriticalValidationErrors && summary.Benchmarks.IsNullOrEmpty())
            {
                analysis.WriteSetupErrorMessage(
                    "Nothing to check as there is no methods in benchmark.",
                    $"Apply one of {nameof(CompetitionBenchmarkAttribute)}, {nameof(CompetitionBaselineAttribute)} or {nameof(BenchmarkAttribute)} to the benchmark methods.");
            }

            if (summary.Config.GetJobs().Skip(1).Any())
            {
                analysis.WriteSetupErrorMessage(
                    "Benchmark configuration includes multiple jobs. " +
                    "This is not supported as there's no way to store metric annotations individually per each job.",
                    "Ensure that the config contains only one job.");
            }

            if (summary.Benchmarks.Select(b => b.Parameters).Distinct().Skip(1).Any())
            {
                analysis.WriteInfoMessage(
                    "Benchmark configuration includes multiple parameters. " +
                    "Note that results for each parameter set will be merged.");
            }

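            // Additional setup checks for competition members and metrics.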
            CheckMembers(analysis);

            CheckMetrics(analysis);

            return analysis.SafeToContinue;
        }
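
        /// <summary>Checks the benchmark execution results; writes error and warning messages if needed.</summary>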
        private static void CheckExecution(SummaryAnalysis analysis)
        {
            var summary = analysis.Summary;

            // DONTTOUCH: DO NOT add returns into the if clauses.
            // All conditions should be checked.

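            // Benchmarks that have at least one execution result.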
            var benchmarksWithReports = summary.Reports
                                        .Where(r => r.ExecuteResults.Any())
                                        .Select(r => r.Benchmark);

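            // Display names of benchmarks (in summary order) that produced no result reports.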
            var benchMissing = summary.GetSummaryOrderBenchmarks()
                               .Except(benchmarksWithReports)
                               .Select(b => b.Target.MethodDisplayInfo)
                               .Distinct()
                               .ToArray();

            if (benchMissing.Any())
            {
                var benchmarks = benchMissing.Length == 1 ? "benchmark" : "benchmarks";
                analysis.WriteExecutionErrorMessage(
                    $"No result reports for {benchmarks}: {benchMissing.Join(", ")}.",
                    "Ensure that benchmarks were run successfully and did not throw any exceptions.");
            }

            var checksMode = analysis.Options.Checks;

            if (checksMode.CheckMetrics)
            {
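                // Time units used to format the timing limits in the messages below.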
                var timeUnits = MetricUnitScale.FromEnumValues(typeof(TimeUnit));

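                // Warn if mean run time is below the too-fast limit: timings are too close to the timer resolution.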
                if (checksMode.TooFastBenchmarkLimit > TimeSpan.Zero)
                {
                    var tooFastReports = GetTargetNames(
                        analysis,
                        r => r.GetResultRuns().Average(run => run.Nanoseconds) < checksMode.TooFastBenchmarkLimit.TotalNanoseconds());

                    if (tooFastReports.Any())
                    {
                        var benchmarks = tooFastReports.Length == 1 ? "Benchmark" : "Benchmarks";
                        var time       = checksMode.TooFastBenchmarkLimit
                                         .TotalNanoseconds()
                                         .ToString(timeUnits);
                        analysis.AddWarningConclusion(
                            $"{benchmarks} {tooFastReports.Join(", ")}: measured run time is less than {time}. " +
                            "Timings are imprecise as they are too close to the timer resolution.",
                            $"Timing limit for this warning is configured via {CompetitionCheckMode.TooFastBenchmarkLimitCharacteristic.FullId}.");
                    }
                }

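                // Warn if mean run time exceeds the long-running limit: averaging over long runs may hide peak timings.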
                if (checksMode.LongRunningBenchmarkLimit > TimeSpan.Zero)
                {
                    var tooSlowReports = GetTargetNames(
                        analysis,
                        r => r.GetResultRuns().Average(run => run.Nanoseconds) > checksMode.LongRunningBenchmarkLimit.TotalNanoseconds());

                    if (tooSlowReports.Any())
                    {
                        var benchmarks = tooSlowReports.Length == 1 ? "Benchmark" : "Benchmarks";
                        var time       = checksMode.LongRunningBenchmarkLimit
                                         .TotalNanoseconds()
                                         .ToString(timeUnits);
                        analysis.AddWarningConclusion(
                            $"{benchmarks} {string.Join(", ", tooSlowReports)}: measured run time is greater than {time}. " +
                            "There's a risk the peak timings were hidden by averages. " +
                            "Consider to reduce the number of iterations performed per each measurement.",
                            $"Timing limit for this warning is configured via {CompetitionCheckMode.LongRunningBenchmarkLimitCharacteristic.FullId}.");
                    }
                }
            }
        }