/// <summary>
/// Builds and runs the given benchmarks, writing a per-run log file under the artifacts folder.
/// </summary>
/// <param name="benchmarkRunInfos">Benchmark groups to build and run.</param>
/// <param name="commonSettingsConfig">Shared config; falls back to <see cref="DefaultConfig.Instance"/> when null.</param>
/// <param name="summaryPerType">NOTE(review): currently unused; kept for signature compatibility — confirm with callers.</param>
/// <returns>One <see cref="Summary"/> per benchmark group, or a single failed summary on critical validation errors.</returns>
public static Summary[] Run(BenchmarkRunInfo[] benchmarkRunInfos, IConfig commonSettingsConfig, bool summaryPerType)
{
    // Fix: GetCompositeLogger() below dereferences the config unconditionally, so a null
    // argument previously threw NullReferenceException despite the null-conditional that
    // was used for ArtifactsPath. Fall back to the default config up front instead
    // (matching the [CanBeNull] overload of this method).
    if (commonSettingsConfig == null)
        commonSettingsConfig = DefaultConfig.Instance;

    var resolver = DefaultResolver;
    var artifactsToCleanup = new List<string>();

    var title = GetTitle(benchmarkRunInfos);
    // ArtifactsPath may still be null on a custom config, hence the coalesce.
    var rootArtifactsFolderPath = (commonSettingsConfig.ArtifactsPath ?? DefaultConfig.Instance.ArtifactsPath).CreateIfNotExists();

    using (var logStreamWriter = Portability.StreamWriter.FromPath(Path.Combine(rootArtifactsFolderPath, title + ".log")))
    {
        // Everything logged goes both to the configured loggers and to the .log file.
        var logger = new CompositeLogger(commonSettingsConfig.GetCompositeLogger(), new StreamLogger(logStreamWriter));
        var supportedBenchmarks = GetSupportedBenchmarks(benchmarkRunInfos, logger, resolver);

        var validationErrors = Validate(supportedBenchmarks, logger);
        if (validationErrors.Any(validationError => validationError.IsCritical))
        {
            // A critical validation error means nothing can be run; report everything as failed.
            return new[]
            {
                Summary.CreateFailed(
                    supportedBenchmarks.SelectMany(b => b.Benchmarks).ToArray(),
                    title, HostEnvironmentInfo.GetCurrent(), commonSettingsConfig,
                    GetResultsFolderPath(rootArtifactsFolderPath), validationErrors)
            };
        }

        var buildPartitions = BenchmarkPartitioner.CreateForBuild(supportedBenchmarks, resolver);

        logger.WriteLineHeader("// ***** BenchmarkRunner: Start *****");
        var globalChronometer = Chronometer.Start();
        var buildResults = BuildInParallel(logger, rootArtifactsFolderPath, buildPartitions, ref globalChronometer);

        try
        {
            var results = new List<Summary>();

            // Map each benchmark back to its build output so the run phase can reuse the artifacts.
            var benchmarkToBuildResult = buildResults
                .SelectMany(buildResult => buildResult.Key.Benchmarks.Select(buildInfo => (buildInfo.Benchmark, buildInfo.Id, buildResult.Value)))
                .ToDictionary(info => info.Benchmark, info => (info.Id, info.Value));

            foreach (var benchmarkRunInfo in supportedBenchmarks) // we run them in the old order now using the new build artifacts
            {
                results.Add(Run(benchmarkRunInfo, benchmarkToBuildResult, resolver, logger, artifactsToCleanup, rootArtifactsFolderPath, ref globalChronometer));
            }

            return results.ToArray();
        }
        finally
        {
            logger.WriteLineHeader("// * Artifacts cleanup *");
            Cleanup(new HashSet<string>(artifactsToCleanup.Distinct()));
        }
    }
}
/// <summary>
/// Builds and runs the given benchmark groups in order, logging to both the configured
/// loggers and a per-run log file. Produces one summary per group, or a single joined
/// summary when any group has <c>ConfigOptions.JoinSummary</c> set.
/// </summary>
/// <param name="benchmarkRunInfos">Benchmark groups to build and run.</param>
/// <returns>Summaries for the executed groups; a short-circuit summary when nothing runs or validation fails critically.</returns>
internal static Summary[] Run(BenchmarkRunInfo[] benchmarkRunInfos)
{
    var resolver = DefaultResolver;
    var artifactsToCleanup = new List<string>();

    var title = GetTitle(benchmarkRunInfos);
    var rootArtifactsFolderPath = GetRootArtifactsFolderPath(benchmarkRunInfos);
    var resultsFolderPath = GetResultsFolderPath(rootArtifactsFolderPath, benchmarkRunInfos);
    var logFilePath = Path.Combine(rootArtifactsFolderPath, title + ".log");

    // append: false — each run starts a fresh log file.
    using (var streamLogger = new StreamLogger(new StreamWriter(logFilePath, append: false)))
    {
        var compositeLogger = CreateCompositeLogger(benchmarkRunInfos, streamLogger);

        var supportedBenchmarks = GetSupportedBenchmarks(benchmarkRunInfos, compositeLogger, resolver);
        // Nothing runnable at all (e.g. every case filtered out) — short-circuit.
        if (!supportedBenchmarks.Any(benchmarks => benchmarks.BenchmarksCases.Any()))
        {
            return new[] { Summary.NothingToRun(title, resultsFolderPath, logFilePath) };
        }

        var validationErrors = Validate(supportedBenchmarks, compositeLogger);
        if (validationErrors.Any(validationError => validationError.IsCritical))
        {
            return new[] { Summary.ValidationFailed(title, resultsFolderPath, logFilePath, validationErrors) };
        }

        // Running countdown of cases left; decremented per group as the loop below progresses.
        var benchmarksToRunCount = supportedBenchmarks.Sum(benchmarkInfo => benchmarkInfo.BenchmarksCases.Length);
        compositeLogger.WriteLineHeader("// ***** BenchmarkRunner: Start *****");
        compositeLogger.WriteLineHeader($"// ***** Found {benchmarksToRunCount} benchmark(s) in total *****");
        // globalChronometer covers build + run; per-group timing uses runChronometer below.
        var globalChronometer = Chronometer.Start();

        var buildPartitions = BenchmarkPartitioner.CreateForBuild(supportedBenchmarks, resolver);
        var buildResults = BuildInParallel(compositeLogger, rootArtifactsFolderPath, buildPartitions, ref globalChronometer);

        try
        {
            var results = new List<Summary>();

            // Map each benchmark case back to its build output so the run phase can reuse the artifacts.
            var benchmarkToBuildResult = buildResults
                .SelectMany(buildResult => buildResult.Key.Benchmarks.Select(buildInfo => (buildInfo.BenchmarkCase, buildInfo.Id, buildResult.Value)))
                .ToDictionary(info => info.BenchmarkCase, info => (info.Id, info.Value));

            foreach (var benchmarkRunInfo in supportedBenchmarks) // we run them in the old order now using the new build artifacts
            {
                var runChronometer = Chronometer.Start();

                var summary = Run(benchmarkRunInfo, benchmarkToBuildResult, resolver, compositeLogger, artifactsToCleanup, resultsFolderPath, logFilePath, ref runChronometer);

                // With JoinSummary the per-group summary is suppressed; a single joined one is printed after the loop.
                if (!benchmarkRunInfo.Config.Options.IsSet(ConfigOptions.JoinSummary))
                    PrintSummary(compositeLogger, benchmarkRunInfo.Config, summary);

                benchmarksToRunCount -= benchmarkRunInfo.BenchmarksCases.Length;
                compositeLogger.WriteLineHeader($"// ** Remained {benchmarksToRunCount} benchmark(s) to run **");
                LogTotalTime(compositeLogger, runChronometer.GetElapsed().GetTimeSpan(), summary.GetNumberOfExecutedBenchmarks(), message: "Run time");
                compositeLogger.WriteLine();

                results.Add(summary);

                // StopOnFirstError aborts the remaining groups as soon as any report in this group failed.
                if (benchmarkRunInfo.Config.Options.IsSet(ConfigOptions.StopOnFirstError) && summary.Reports.Any(report => !report.Success))
                    break;
            }

            if (supportedBenchmarks.Any(b => b.Config.Options.IsSet(ConfigOptions.JoinSummary)))
            {
                // Collapse everything gathered so far into one joined summary, printed with the
                // config of the first group that requested joining.
                var joinedSummary = Summary.Join(results, globalChronometer.GetElapsed());
                PrintSummary(compositeLogger, supportedBenchmarks.First(b => b.Config.Options.IsSet(ConfigOptions.JoinSummary)).Config, joinedSummary);
                results.Clear();
                results.Add(joinedSummary);
            }

            var totalTime = globalChronometer.GetElapsed().GetTimeSpan();
            int totalNumberOfExecutedBenchmarks = results.Sum(summary => summary.GetNumberOfExecutedBenchmarks());
            LogTotalTime(compositeLogger, totalTime, totalNumberOfExecutedBenchmarks, "Global total time");

            return results.ToArray();
        }
        finally
        {
            // Always clean up generated artifacts and flush the log, even when a run threw.
            compositeLogger.WriteLineHeader("// * Artifacts cleanup *");
            Cleanup(new HashSet<string>(artifactsToCleanup.Distinct()));
            compositeLogger.Flush();
        }
    }
}
/// <summary>
/// Builds and runs the given benchmark groups under a common config, printing either one
/// summary per group (<c>SummaryPerType</c>) or a single joined summary.
/// </summary>
/// <param name="benchmarkRunInfos">Benchmark groups to build and run.</param>
/// <param name="commonSettingsConfig">Shared config; <see cref="DefaultConfig.Instance"/> is used when null.</param>
/// <returns>The produced summaries; a single failed summary when nothing runs or validation fails critically.</returns>
[PublicAPI]
public static Summary[] Run(BenchmarkRunInfo[] benchmarkRunInfos, [CanBeNull] IConfig commonSettingsConfig)
{
    // After this guard commonSettingsConfig is never null, so the null checks that
    // previously guarded SummaryPerType and Summary.Join below were dead code and
    // have been removed.
    if (commonSettingsConfig == null)
        commonSettingsConfig = DefaultConfig.Instance;

    var resolver = DefaultResolver;
    var artifactsToCleanup = new List<string>();

    string title = GetTitle(benchmarkRunInfos);
    // ArtifactsPath may still be null on a custom config, hence the coalesce.
    string rootArtifactsFolderPath = (commonSettingsConfig.ArtifactsPath ?? DefaultConfig.Instance.ArtifactsPath).CreateIfNotExists();

    using (var logStreamWriter = StreamWriter.FromPath(Path.Combine(rootArtifactsFolderPath, title + ".log")))
    {
        // Everything logged goes both to the configured loggers and to the .log file.
        var logger = new CompositeLogger(commonSettingsConfig.GetCompositeLogger(), new StreamLogger(logStreamWriter));
        var supportedBenchmarks = GetSupportedBenchmarks(benchmarkRunInfos, logger, resolver);

        // Nothing runnable at all (e.g. every case filtered out) — short-circuit with no validation errors.
        if (!supportedBenchmarks.Any(benchmarks => benchmarks.BenchmarksCases.Any()))
        {
            return new[]
            {
                Summary.CreateFailed(
                    supportedBenchmarks.SelectMany(b => b.BenchmarksCases).ToArray(),
                    title, HostEnvironmentInfo.GetCurrent(), commonSettingsConfig,
                    GetResultsFolderPath(rootArtifactsFolderPath), Array.Empty<ValidationError>())
            };
        }

        var validationErrors = Validate(supportedBenchmarks, logger);
        if (validationErrors.Any(validationError => validationError.IsCritical))
        {
            return new[]
            {
                Summary.CreateFailed(
                    supportedBenchmarks.SelectMany(b => b.BenchmarksCases).ToArray(),
                    title, HostEnvironmentInfo.GetCurrent(), commonSettingsConfig,
                    GetResultsFolderPath(rootArtifactsFolderPath), validationErrors)
            };
        }

        var buildPartitions = BenchmarkPartitioner.CreateForBuild(supportedBenchmarks, resolver);

        logger.WriteLineHeader("// ***** BenchmarkRunner: Start *****");
        var globalChronometer = Chronometer.Start();
        var buildResults = BuildInParallel(logger, rootArtifactsFolderPath, buildPartitions, ref globalChronometer);

        try
        {
            var results = new List<Summary>();

            // Map each benchmark case back to its build output so the run phase can reuse the artifacts.
            var benchmarkToBuildResult = buildResults
                .SelectMany(buildResult => buildResult.Key.Benchmarks.Select(buildInfo => (buildInfo.BenchmarkCase, buildInfo.Id, buildResult.Value)))
                .ToDictionary(info => info.BenchmarkCase, info => (info.Id, info.Value));

            foreach (var benchmarkRunInfo in supportedBenchmarks) // we run them in the old order now using the new build artifacts
            {
                var runChronometer = Chronometer.Start();

                var summary = Run(benchmarkRunInfo, benchmarkToBuildResult, resolver, logger, artifactsToCleanup, rootArtifactsFolderPath, ref runChronometer);

                // Per-group summaries are printed only in SummaryPerType mode; otherwise a joined one follows.
                if (commonSettingsConfig.SummaryPerType)
                    PrintSummary(logger, benchmarkRunInfo.Config, summary);

                LogTotalTime(logger, runChronometer.GetElapsed().GetTimeSpan(), summary.GetNumberOfExecutedBenchmarks(), message: "Run time");
                logger.WriteLine();

                results.Add(summary);
            }

            if (!commonSettingsConfig.SummaryPerType)
            {
                // Collapse the per-group summaries into one joined summary.
                var joinedSummary = Summary.Join(results, commonSettingsConfig, globalChronometer.GetElapsed());
                PrintSummary(logger, commonSettingsConfig, joinedSummary);
                results.Clear();
                results.Add(joinedSummary);
            }

            return results.ToArray();
        }
        finally
        {
            logger.WriteLineHeader("// * Artifacts cleanup *");
            Cleanup(new HashSet<string>(artifactsToCleanup.Distinct()));
        }
    }
}
/// <summary>
/// Builds and runs the given benchmark groups in order, tracking overall progress and
/// estimated remaining work, and producing either per-group summaries or a single joined
/// summary when any group has <c>ConfigOptions.JoinSummary</c> set.
/// </summary>
/// <param name="benchmarkRunInfos">Benchmark groups to build and run; each is disposed when done (see finally block).</param>
/// <returns>Summaries for the executed groups; a short-circuit summary when nothing runs or validation fails critically.</returns>
internal static Summary[] Run(BenchmarkRunInfo[] benchmarkRunInfos)
{
    var resolver = DefaultResolver;
    var artifactsToCleanup = new List<string>();

    var title = GetTitle(benchmarkRunInfos);
    var rootArtifactsFolderPath = GetRootArtifactsFolderPath(benchmarkRunInfos);
    var resultsFolderPath = GetResultsFolderPath(rootArtifactsFolderPath, benchmarkRunInfos);
    var logFilePath = Path.Combine(rootArtifactsFolderPath, title + ".log");

    using (var streamLogger = new StreamLogger(GetLogFileStreamWriter(benchmarkRunInfos, logFilePath)))
    {
        var compositeLogger = CreateCompositeLogger(benchmarkRunInfos, streamLogger);

        var supportedBenchmarks = GetSupportedBenchmarks(benchmarkRunInfos, compositeLogger, resolver);
        // Nothing runnable at all (e.g. every case filtered out) — short-circuit.
        if (!supportedBenchmarks.Any(benchmarks => benchmarks.BenchmarksCases.Any()))
        {
            return new[] { Summary.NothingToRun(title, resultsFolderPath, logFilePath) };
        }

        var validationErrors = Validate(supportedBenchmarks, compositeLogger);
        if (validationErrors.Any(validationError => validationError.IsCritical))
        {
            return new[] { Summary.ValidationFailed(title, resultsFolderPath, logFilePath, validationErrors) };
        }

        // totalBenchmarkCount stays fixed for progress reporting; benchmarksToRunCount is the
        // mutable countdown that the inner Run decrements (passed by ref below).
        int totalBenchmarkCount = supportedBenchmarks.Sum(benchmarkInfo => benchmarkInfo.BenchmarksCases.Length);
        int benchmarksToRunCount = totalBenchmarkCount;
        compositeLogger.WriteLineHeader("// ***** BenchmarkRunner: Start *****");
        compositeLogger.WriteLineHeader($"// ***** Found {totalBenchmarkCount} benchmark(s) in total *****");
        // globalChronometer covers build + run; runsChronometer (started later) excludes build time.
        var globalChronometer = Chronometer.Start();

        var buildPartitions = BenchmarkPartitioner.CreateForBuild(supportedBenchmarks, resolver);
        var buildResults = BuildInParallel(compositeLogger, rootArtifactsFolderPath, buildPartitions, in globalChronometer);

        // When every build failed there is nothing useful to execute; used to abort the run loop early.
        var allBuildsHaveFailed = buildResults.Values.All(buildResult => !buildResult.IsBuildSuccess);

        try
        {
            var results = new List<Summary>();

            // Map each benchmark case back to its build output so the run phase can reuse the artifacts.
            var benchmarkToBuildResult = buildResults
                .SelectMany(buildResult => buildResult.Key.Benchmarks.Select(buildInfo => (buildInfo.BenchmarkCase, buildInfo.Id, buildResult.Value)))
                .ToDictionary(info => info.BenchmarkCase, info => (info.Id, info.Value));

            // used to estimate finish time, in contrary to globalChronometer it does not include build time
            var runsChronometer = Chronometer.Start();

            foreach (var benchmarkRunInfo in supportedBenchmarks) // we run them in the old order now using the new build artifacts
            {
                var summary = Run(benchmarkRunInfo, benchmarkToBuildResult, resolver, compositeLogger, artifactsToCleanup, resultsFolderPath, logFilePath, totalBenchmarkCount, in runsChronometer, ref benchmarksToRunCount);

                // With JoinSummary the per-group summary is suppressed; a single joined one is printed after the loop.
                if (!benchmarkRunInfo.Config.Options.IsSet(ConfigOptions.JoinSummary))
                    PrintSummary(compositeLogger, benchmarkRunInfo.Config, summary);

                LogTotalTime(compositeLogger, summary.TotalTime, summary.GetNumberOfExecutedBenchmarks(), message: "Run time");
                compositeLogger.WriteLine();

                results.Add(summary);

                // Abort remaining groups on first failure (when requested) or when no build
                // succeeded at all. NOTE(review): the allBuildsHaveFailed break happens after
                // one iteration, so one (failed) summary is still produced — appears intentional.
                if ((benchmarkRunInfo.Config.Options.IsSet(ConfigOptions.StopOnFirstError) && summary.Reports.Any(report => !report.Success)) || allBuildsHaveFailed)
                    break;
            }

            if (supportedBenchmarks.Any(b => b.Config.Options.IsSet(ConfigOptions.JoinSummary)))
            {
                // Collapse everything gathered so far into one joined summary, printed with the
                // config of the first group that requested joining.
                var joinedSummary = Summary.Join(results, runsChronometer.GetElapsed());
                PrintSummary(compositeLogger, supportedBenchmarks.First(b => b.Config.Options.IsSet(ConfigOptions.JoinSummary)).Config, joinedSummary);
                results.Clear();
                results.Add(joinedSummary);
            }

            var totalTime = globalChronometer.GetElapsed().GetTimeSpan();
            int totalNumberOfExecutedBenchmarks = results.Sum(summary => summary.GetNumberOfExecutedBenchmarks());
            LogTotalTime(compositeLogger, totalTime, totalNumberOfExecutedBenchmarks, "Global total time");

            return results.ToArray();
        }
        finally
        {
            // some benchmarks might be using parameters that have locking finalizers
            // so we need to dispose them after we are done running the benchmarks
            // see https://github.com/dotnet/BenchmarkDotNet/issues/1383 and https://github.com/dotnet/runtime/issues/314 for more
            foreach (var benchmarkInfo in benchmarkRunInfos)
            {
                benchmarkInfo.Dispose();
            }

            compositeLogger.WriteLineHeader("// * Artifacts cleanup *");
            Cleanup(new HashSet<string>(artifactsToCleanup.Distinct()));
            compositeLogger.Flush();
        }
    }
}