Example #1
 private Summary(string title, HostEnvironmentInfo hostEnvironmentInfo, IConfig config, string resultsDirectoryPath, TimeSpan totalTime, ValidationError[] validationErrors, Benchmark[] benchmarks, BenchmarkReport[] reports)
     : this(title, hostEnvironmentInfo, config, resultsDirectoryPath, totalTime, validationErrors)
 {
     Benchmarks = benchmarks;
     Table = new SummaryTable(this);
     Reports = reports ?? new BenchmarkReport[0];
 }
Example #2
        public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report)
        {
            var actual = report.AllMeasurements.Where(m => m.IterationMode == IterationMode.MainTarget).ToArray();
            if (actual.IsEmpty())
                yield break;
            var result = report.AllMeasurements.Where(m => m.IterationMode == IterationMode.Result).ToArray();
            var actualOutliers = actual.GetStatistics().Outliers;
            bool removeOutliers = report.Benchmark.Job.ResolveValue(AccuracyMode.RemoveOutliersCharacteristic, EngineResolver.Instance); // TODO: improve

            if (result.Length + (actualOutliers.Length * (removeOutliers ? 1 : 0)) != actual.Length)
            {
                // This should never happen
                yield return CreateHint(
                    string.Format(
                        "Something went wrong with outliers: Size(MainTarget) = {0}, Size(MainTarget/Outliers) = {1}, Size(Result) = {2}), RemoveOutliers = {3}",
                        actual.Length, actualOutliers.Length, result.Length, removeOutliers),
                    report);
                yield break;
            }

            if (actualOutliers.Any())
            {
                int n = actualOutliers.Length;
                string words = n == 1 ? "outlier was" : "outliers were";
                string verb = removeOutliers ? "removed" : "detected";
                yield return CreateHint($"{n} {words} {verb}", report);
            }
        }
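For orientation, a minimal sketch of how such a per-report analyser is typically driven; the Analyse entry point is an assumption here, and summary.Reports is the property initialized by the Summary constructors shown in Examples #7 and #8 below:

        // Hypothetical driver: walk every report in the summary and collect the
        // conclusions that AnalyseReport yields for each one. Reports is never
        // null, so no guard is needed.
        public IEnumerable<Conclusion> Analyse(Summary summary)
        {
            foreach (var report in summary.Reports)
                foreach (var conclusion in AnalyseReport(report))
                    yield return conclusion;
        }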
Example #3
 public void Stop(Benchmark benchmark, BenchmarkReport report)
 {
     var filePrefix = GetFileName("GC", report.Benchmark, benchmark.Parameters);
     var output = RunProcess("logman", string.Format("stop {0} -ets", filePrefix));
     if (output.Contains(ExecutedOkayMessage) == false)
         logger.WriteLineError("logman stop output\n" + output);
 }
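The RunProcess helper is not part of the snippet; a minimal sketch of what it presumably looks like (name and signature assumed from the call above), built on System.Diagnostics.Process:

 // Hypothetical helper: run a console tool, capture its stdout, and return the
 // text so the caller can scan it for a success marker.
 private static string RunProcess(string fileName, string arguments)
 {
     var startInfo = new System.Diagnostics.ProcessStartInfo(fileName, arguments)
     {
         UseShellExecute = false,
         RedirectStandardOutput = true,
         CreateNoWindow = true
     };
     using (var process = System.Diagnostics.Process.Start(startInfo))
     {
         string output = process.StandardOutput.ReadToEnd();
         process.WaitForExit();
         return output;
     }
 }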
Example #4
 public void StdDevTest()
 {
     // TODO: write a mock for benchmark reports
     var report = new BenchmarkReport(
         new Benchmark(
             new BenchmarkTarget(null, null),
             new BenchmarkTask(1,
             new BenchmarkConfiguration(
                 BenchmarkMode.SingleRun,
                 BenchmarkPlatform.AnyCpu,
                 BenchmarkJitVersion.HostJit,
                 BenchmarkFramework.HostFramework,
                 BenchmarkToolchain.Classic,
                 BenchmarkRuntime.Clr,
                 1,
                 1))),
         new List<BenchmarkRunReport>
         {
             new BenchmarkRunReport(1, 10),
             new BenchmarkRunReport(1, 50),
             new BenchmarkRunReport(1, 100)
         });
     var reports = new[] { report };
     var warnings = new BenchmarkStdDevAnalyser().Analyze(reports).ToList();
     Assert.Equal(1, warnings.Count);
     foreach (var warning in warnings)
         output.WriteLine($"[{warning.Kind}] {warning.Message}");
 }
Example #5
 private Conclusion([NotNull] string analyserId, ConclusionKind kind, [NotNull] string message, [CanBeNull] BenchmarkReport report)
 {
     AnalyserId = analyserId;
     Kind = kind;
     Message = message;
     Report = report;
 }
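The private constructor suggests static factory methods per ConclusionKind; a hedged sketch (method names assumed; the two-argument CreateHint/CreateWarning helpers used by the analysers in Examples #2 and #18 would presumably forward to these with their analyser id):

 // Hypothetical factories wrapping the private constructor above.
 public static Conclusion CreateHint(string analyserId, string message, BenchmarkReport report = null)
     => new Conclusion(analyserId, ConclusionKind.Hint, message, report);

 public static Conclusion CreateWarning(string analyserId, string message, BenchmarkReport report = null)
     => new Conclusion(analyserId, ConclusionKind.Warning, message, report);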
Example #6
 private static string GetInfoFromOutput(this BenchmarkReport report, string prefix)
 {
     return (from executeResults in report.ExecuteResults
             from extraOutputLine in executeResults.ExtraOutput.Where(line => line.StartsWith(prefix))
             select extraOutputLine.Substring(prefix.Length)).FirstOrDefault();
 }
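A hypothetical usage: if the benchmark process printed a line such as "// GC: Gen0=2 Gen1=1 Gen2=0", extraction with an assumed prefix would look like this (the real prefix constants are not shown in these snippets):

 string gcInfo = report.GetInfoFromOutput("// GC: "); // "Gen0=2 Gen1=1 Gen2=0", or null when no line matches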
Example #7
 private Summary(string title, HostEnvironmentInfo hostEnvironmentInfo, IConfig config, string resultsDirectoryPath, TimeSpan totalTime, ValidationError[] validationErrors)
 {
     Title = title;
     HostEnvironmentInfo = hostEnvironmentInfo;
     Config = config;
     ResultsDirectoryPath = resultsDirectoryPath;
     TotalTime = totalTime;
     ValidationErrors = validationErrors;
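     // Default to an empty array so consumers can enumerate Reports without a null check.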
     Reports = new BenchmarkReport[0];
 }
Example #8
 private Summary(string title, HostEnvironmentInfo hostEnvironmentInfo, IConfig config, string resultsDirectoryPath, TimeSpan totalTime, ValidationError[] validationErrors)
 {
     Title = title;
     HostEnvironmentInfo = hostEnvironmentInfo;
     Config = config;
     ResultsDirectoryPath = resultsDirectoryPath;
     TotalTime = totalTime;
     ValidationErrors = validationErrors;
     Reports = new BenchmarkReport[0];
 }
Example #9
        public void Stop(Benchmark benchmark, BenchmarkReport report)
        {
            // ETW real-time sessions receive events with a slight delay. Typically it
            // shouldn't be more than a few seconds. This increases the likelihood that
            // all relevant events are processed by the collection thread by the time we
            // are done with the benchmark.
            Thread.Sleep(TimeSpan.FromSeconds(3));

            session.Dispose();
        }
Example #10
        public void Stop(Benchmark benchmark, BenchmarkReport report)
        {
            // ETW real-time sessions receive events with a slight delay. Typically it
            // shouldn't be more than a few seconds. This increases the likelihood that
            // all relevant events are processed by the collection thread by the time we
            // are done with the benchmark.
            Thread.Sleep(TimeSpan.FromSeconds(3));

            session.Dispose();

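            // The sum of Operations over all measurements is the total number of times the
            // benchmark body ran; ProcessEtwEvents presumably uses it to express the
            // collected ETW statistics per operation.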
            var stats = ProcessEtwEvents(report.AllMeasurements.Sum(m => m.Operations));
            results.Add(benchmark, stats);
        }
Example #11
 public void Stop(Benchmark benchmark, BenchmarkReport report)
 {
     throw new InvalidOperationException(message);
 }
Example #12
 public virtual IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report) => Enumerable.Empty<Conclusion>();
Example #13
 public static string GetHardwareIntrinsicsInfo(this BenchmarkReport report) => report.GetInfoFromOutput(DisplayedHardwareIntrinsicsPrefix);
Example #14
 public static string GetGcInfo(this BenchmarkReport report) => report.GetInfoFromOutput(DisplayedGcInfoPrefix);
Example #15
        private static List<ExecuteResult> Execute(ILogger logger, Benchmark benchmark, IToolchain toolchain, BuildResult buildResult, IConfig config)
        {
            var executeResults = new List<ExecuteResult>();

            logger.WriteLineInfo("// *** Execute ***");
            var launchCount = Math.Max(1, benchmark.Job.LaunchCount.IsAuto ? 2 : benchmark.Job.LaunchCount.Value);

            for (int processNumber = 0; processNumber < launchCount; processNumber++)
            {
                var printedProcessNumber = (benchmark.Job.LaunchCount.IsAuto && processNumber < 2) ? "" : " / " + launchCount.ToString();
                logger.WriteLineInfo($"// Launch: {processNumber + 1}{printedProcessNumber}");

                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                executeResults.Add(executeResult);

                var measurements = executeResults
                    .SelectMany(r => r.Data)
                    .Select(line => Measurement.Parse(logger, line, 0))
                    .Where(r => r.IterationMode != IterationMode.Unknown)
                    .ToArray();

                if (!measurements.Any())
                {
                    // Something went wrong during the benchmark, don't bother doing more runs
                    logger.WriteLineError($"No more Benchmark runs will be launched as NO measurements were obtained from the previous run!");
                    break;
                }

                if (benchmark.Job.LaunchCount.IsAuto && processNumber == 1)
                {
                    var idleApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.IdleTarget).Select(m => m.Nanoseconds)).Median;
                    var mainApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.MainTarget).Select(m => m.Nanoseconds)).Median;
                    var percent = idleApprox / mainApprox * 100;
                    launchCount = (int)Math.Round(Math.Max(2, 2 + (percent - 1) / 3)); // an empirical formula
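                    // E.g. an idle time that is 10% of the main time gives percent = 10 and
                    // launchCount = round(max(2, 2 + (10 - 1) / 3)) = 5.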
                }
            }
            logger.WriteLine();

            // Do a "Diagnostic" run, but DISCARD the results, so that the overhead of Diagnostics doesn't skew the overall results
            if (config.GetDiagnosers().Count() > 0)
            {
                logger.WriteLineInfo($"// Run, Diagnostic");
                config.GetCompositeDiagnoser().Start(benchmark);
                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, config.GetCompositeDiagnoser());
                var allRuns = executeResult.Data.Select(line => Measurement.Parse(logger, line, 0)).Where(r => r.IterationMode != IterationMode.Unknown).ToList();
                var report = new BenchmarkReport(benchmark, null, null, new[] { executeResult }, allRuns);
                config.GetCompositeDiagnoser().Stop(benchmark, report);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                logger.WriteLine();
            }

            return executeResults;
        }
Example #16
 public BenchmarkAnalysisWarning(string kind, string message, BenchmarkReport report)
 {
     Kind = kind;
     Message = message;
     Report = report;
 }
Example #17
 public void Stop(Benchmark benchmark, BenchmarkReport report)
 {
     // Do nothing
 }
Example #18
 public override IEnumerable<Conclusion> AnalyseReport(BenchmarkReport report)
 {
     if (report.Benchmark.Target.Type.GetTypeInfo().Assembly.IsDebug().IsTrue())
         yield return CreateWarning("Benchmark was built in DEBUG configuration. Please, build it in RELEASE.", report);
 }
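The IsDebug()/IsTrue() extensions are not shown; a plausible sketch of the underlying check (an assumed implementation, based on the DebuggableAttribute the C# compiler emits for Debug builds):

 // Assumed implementation: Debug builds carry a DebuggableAttribute with JIT tracking
 // enabled, so a nullable result covers assemblies that lack the attribute entirely;
 // IsTrue() would then amount to "value == true". Requires using System.Reflection;
 // for the GetCustomAttribute<T> extension.
 private static bool? IsDebug(this Assembly assembly)
     => assembly.GetCustomAttribute<System.Diagnostics.DebuggableAttribute>()?.IsJITTrackingEnabled;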
Example #19
        private static List<ExecuteResult> Execute(ILogger logger, Benchmark benchmark, IToolchain toolchain, BuildResult buildResult, IConfig config, IResolver resolver)
        {
            var executeResults = new List<ExecuteResult>();

            logger.WriteLineInfo("// *** Execute ***");
            bool analyzeRunToRunVariance = benchmark.Job.ResolveValue(AccuracyMode.AnalyzeLaunchVarianceCharacteristic, resolver);
            bool autoLaunchCount = !benchmark.Job.HasValue(RunMode.LaunchCountCharacteristic);
            int defaultValue = analyzeRunToRunVariance ? 2 : 1;
            int launchCount = Math.Max(
                1,
                autoLaunchCount ? defaultValue : benchmark.Job.Run.LaunchCount);

            for (int launchIndex = 0; launchIndex < launchCount; launchIndex++)
            {
                string printedLaunchCount = (analyzeRunToRunVariance &&
                    autoLaunchCount &&
                    launchIndex < 2)
                    ? ""
                    : " / " + launchCount;
                logger.WriteLineInfo($"// Launch: {launchIndex + 1}{printedLaunchCount}");

                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, resolver);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                if (executeResult.ExitCode != 0)
                    logger.WriteLineError("ExitCode != 0");
                executeResults.Add(executeResult);

                var measurements = executeResults
                    .SelectMany(r => r.Data)
                    .Select(line => Measurement.Parse(logger, line, 0))
                    .Where(r => r.IterationMode != IterationMode.Unknown)
                    .ToArray();

                if (!measurements.Any())
                {
                    // Something went wrong during the benchmark, don't bother doing more runs
                    logger.WriteLineError($"No more Benchmark runs will be launched as NO measurements were obtained from the previous run!");
                    break;
                }

                if (autoLaunchCount && launchIndex == 1 && analyzeRunToRunVariance)
                {
                    // TODO: improve this logic
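                    // After the second launch of an auto-count run, compare the median idle
                    // (overhead) time with the median main time: the closer the benchmark body
                    // is to pure overhead, the more launches the formula below schedules.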
                    var idleApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.IdleTarget).Select(m => m.Nanoseconds)).Median;
                    var mainApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.MainTarget).Select(m => m.Nanoseconds)).Median;
                    var percent = idleApprox / mainApprox * 100;
                    launchCount = (int)Math.Round(Math.Max(2, 2 + (percent - 1) / 3)); // an empirical formula
                }
            }
            logger.WriteLine();

            // Do a "Diagnostic" run, but DISCARD the results, so that the overhead of Diagnostics doesn't skew the overall results
            if (config.GetDiagnosers().Any())
            {
                logger.WriteLineInfo("// Run, Diagnostic");
                var compositeDiagnoser = config.GetCompositeDiagnoser();

                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, resolver, compositeDiagnoser);

                var allRuns = executeResult.Data.Select(line => Measurement.Parse(logger, line, 0)).Where(r => r.IterationMode != IterationMode.Unknown).ToList();
                var report = new BenchmarkReport(benchmark, null, null, new[] { executeResult }, allRuns);
                compositeDiagnoser.ProcessResults(benchmark, report);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                logger.WriteLine();
            }

            return executeResults;
        }
Example #20
 public void ProcessResults(Benchmark benchmark, BenchmarkReport report)
     => diagnosers.ForEach(diagnoser => diagnoser.ProcessResults(benchmark, report));
Example #21
 public static IList<Measurement> GetResultRuns(this BenchmarkReport report) =>
     report.AllMeasurements.Where(r => r.IterationMode == IterationMode.Result).ToList();
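A hypothetical usage, feeding only the Result iterations into the Statistics type already used in Examples #15 and #19:

 var resultStats = new Statistics(report.GetResultRuns().Select(m => m.Nanoseconds));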
Example #22
 public void Stop(Benchmark benchmark, BenchmarkReport report)
 {
     foreach (var diagnoser in diagnosers)
         diagnoser.Stop(benchmark, report);
 }