// Renders the summary table as plain text: common columns first, then a
// pipe-separated header, a dashed separator row, and one row per benchmark.
private void PrintTable(SummaryTable table, ILogger logger)
{
    // Nothing to render — report and bail out early.
    if (table.FullContent.Length == 0)
    {
        logger.WriteLineError("There are no benchmarks found ");
        logger.NewLine();
        return;
    }

    table.PrintCommonColumns(logger);
    logger.NewLine();

    // Open a fenced code block when configured (presumably closed by the caller).
    if (useCodeBlocks)
    {
        logger.Write("```");
        logger.NewLine();
    }

    table.PrintLine(table.FullHeader, logger, "", " |");
    logger.NewLine();

    // Separator row: one run of dashes per visible column.
    var separatorCells = table.Columns
        .Where(column => column.NeedToShow)
        .Select(column => new string('-', column.Width) + " |");
    logger.WriteLineStatistic(string.Concat(separatorCells));

    foreach (var row in table.FullContent)
    {
        table.PrintLine(row, logger, "", " |");
        logger.NewLine();
    }
}
// Executes a benchmark `launchCount` times (auto-tuned when Job.LaunchCount is auto)
// and returns one ExecuteResult per launch. When diagnosers are configured, a
// separate "diagnostic" run is performed and DISCARDED so its overhead never
// skews the measured results.
private static List <ExecuteResult> Execute(ILogger logger, Benchmark benchmark, IToolchain toolchain, BuildResult buildResult, IConfig config)
{
    var executeResults = new List <ExecuteResult>();
    logger.WriteLineInfo("// *** Execute ***");
    // Auto mode starts with 2 launches; the count may grow after the second launch (below).
    var launchCount = Math.Max(1, benchmark.Job.LaunchCount.IsAuto ? 2 : benchmark.Job.LaunchCount.Value);
    for (int processNumber = 0; processNumber < launchCount; processNumber++)
    {
        // In auto mode the final launch count is unknown during the first two
        // launches, so the " / total" suffix is suppressed for them.
        var printedProcessNumber = (benchmark.Job.LaunchCount.IsAuto && processNumber < 2) ? "" : " / " + launchCount.ToString();
        logger.WriteLineInfo($"// Launch: {processNumber + 1}{printedProcessNumber}");
        var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger);
        if (!executeResult.FoundExecutable)
        {
            logger.WriteLineError("Executable not found");
        }
        executeResults.Add(executeResult);

        // After the second auto-mode launch, derive the remaining number of
        // launches from the measurements collected so far.
        if (benchmark.Job.LaunchCount.IsAuto && processNumber == 1)
        {
            var measurements = executeResults.
                SelectMany(r => r.Data).
                Select(line => Measurement.Parse(logger, line, 0)).
                Where(r => r != null).
                ToArray();
            var idleApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.IdleTarget).Select(m => m.Nanoseconds)).Median;
            var mainApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.MainTarget).Select(m => m.Nanoseconds)).Median;
            // NOTE(review): percent = idle/main * 100 — presumably the idle overhead
            // relative to the main workload drives the extra-launch count; confirm
            // the intended direction of this ratio.
            var percent = idleApprox / mainApprox * 100;
            launchCount = (int)Math.Round(Math.Max(2, 2 + (percent - 1) / 3)); // an empirical formula
        }
    }
    logger.NewLine();

    // Do a "Diagnostic" run, but DISCARD the results, so that the overhead of Diagnostics doesn't skew the overall results
    if (config.GetDiagnosers().Count() > 0)
    {
        logger.WriteLineInfo($"// Run, Diagnostic");
        config.GetCompositeDiagnoser().Start(benchmark);
        var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, config.GetCompositeDiagnoser());
        // Parse every output line into a Measurement (nulls filtered out) purely
        // so the diagnoser gets a populated report; nothing is added to executeResults.
        var allRuns = executeResult.Data.Select(line => Measurement.Parse(logger, line, 0)).Where(r => r != null).ToList();
        var report = new BenchmarkReport(benchmark, null, null, new[] { executeResult }, allRuns);
        config.GetCompositeDiagnoser().Stop(benchmark, report);
        if (!executeResult.FoundExecutable)
        {
            logger.WriteLineError("Executable not found");
        }
        logger.NewLine();
    }
    return(executeResults);
}
// Renders the summary table as an HTML fragment: common columns inside a
// <pre><code> block, then the full table with <th>/<td> cells, rows indented
// by one tab via a prefixing logger.
private void PrintTable(SummaryTable table, ILogger logger)
{
    if (table.FullContent.Length == 0)
    {
        logger.WriteLineError("<pre>There are no benchmarks found</pre>");
        return;
    }

    logger.Write("<pre><code>");
    table.PrintCommonColumns(logger);
    logger.WriteLine("</code></pre>");
    logger.NewLine();

    logger.WriteLine("<table>");
    var indentedLogger = new LoggerWithPrefix(logger, "\t");

    // Header row.
    indentedLogger.Write("<tr>");
    table.PrintLine(table.FullHeader, indentedLogger, "<th>", "</th>");
    indentedLogger.WriteLine("</tr>");

    // One <tr> per benchmark row.
    foreach (var row in table.FullContent)
    {
        indentedLogger.Write("<tr>");
        table.PrintLine(row, indentedLogger, "<td>", "</td>");
        indentedLogger.WriteLine("</tr>");
    }

    logger.WriteLine("</table>");
}
// Command entry point: loads the target Unity assembly, applies all hook
// modules to it from the given input DLLs, and writes the patched assembly back.
// NOTE(review): the second argument to LogDebug appears to be a numeric
// verbosity level (2-4) compared against DebugVerbosity — confirm semantics.
// NOTE(review): the closing brace of this method lies outside this chunk.
public ValueTask ExecuteAsync(IConsole _)
{
    _logger.Settings.DebugVerbosity = (int)Verbosity;
    _sw.Start();
    _logger.LogInformation($"UniversalUnityHooks v{Program.Version}");
    _logger.LogDebug($"System Version: {Environment.OSVersion}", 3);
    // No explicit target: fall back to locating Assembly-CSharp.dll under the current directory.
    if (Target == null)
    {
        _logger.LogWarning("Target is null, defaulting to '?_Data/Managed/Assembly-CSharp.dll'.");
        Target = Util.FindAssemblyCSharp(Directory.GetCurrentDirectory());
    }
    _logger.LogDebug($"Input: '{string.Join(",", Files)}'\nTarget: '{Target}'", 3);
    // TODO: More asserts, especially on input files
    CliAssert.IsRequired(Target, "Target Assembly (target,t)");
    CliAssert.IsFile(Target);
    CliAssert.HasExtension(Target, ".dll");
    _logger.LogDebug("Asserts passed, adding resolver...", 2);
    if (AddTargetDirectoryResolve)
    {
        ResolveDirectories.Add(Target.Directory);
    }
    var resolver = Util.CreateAssemblyResolver(ResolveDirectories);
    // A ".clean" sibling file is treated as the pristine copy of the target:
    // restore it before patching, presumably so hooks are not applied twice.
    if (File.Exists(Target.FullName + ".clean"))
    {
        _logger.LogDebug($"IO: '.clean' File exists, overwriting target assembly with clean file...", 4);
        File.Delete(Target.FullName);
        File.Copy(Target.FullName + ".clean", Target.FullName, true);
    }
    _logger.LogDebug($"IO: Reading assembly from '{Target.FullName}'...", 4);
    var targetDefinition = AssemblyDefinition.ReadAssembly(Target.FullName, new ReaderParameters { AssemblyResolver = resolver });
    // The fixed set of patching modules applied to every input file.
    var modules = new List <IModule>();
    modules.Add(new HookModule());
    modules.Add(new AddMethodModule());
    modules.Add(new ILProcessorModule());
    modules.Add(new Modules.LowLevelModule());
    _logger.LogDebug($"{modules.Count} Module(s) loaded.", 2);
    // Expand any directories among the inputs into their contained DLLs.
    Files = Util.FlattenDirectory(Files, "*.dll");
    foreach (var input in Files)
    {
        ReadAndExecute(input, modules, targetDefinition);
    }
    _logger.NewLine();
    // DryRun skips persisting the patched assembly.
    if (!DryRun)
    {
        WriteChanges(targetDefinition);
    }
    if (CopyToTarget)
    {
        CopyInputFiles();
    }
    _logger.LogInformation($"Operation completed. Operation took {_sw.ElapsedMilliseconds}ms.");
    return(default);
// Logs a usage summary: one "<Server>: <count>" line per tracked email
// account, followed by the most used untracked account's server.
internal void Summary(EmailAccountUseSummary summary)
{
    _log.NewLine();
    foreach (var emailAccount in summary.TrackedEmailsUsed)
    {
        _log.WriteLine($"{ emailAccount.EmailServer.Capitalize() }: { emailAccount.Count }");
    }
    // FIX: MostUsedNotTrackedEmailAccount is presumably null when no untracked
    // account was ever seen; the unconditional dereference threw
    // NullReferenceException. Null-conditional access prints an empty value instead.
    _log.WriteLine($"Most used not tracked email: { summary.MostUsedNotTrackedEmailAccount?.EmailServer }");
}
// Prints each diagnoser's output under a header identifying it by type name.
public void DisplayResults(ILogger logger)
{
    foreach (var currentDiagnoser in diagnosers)
    {
        // TODO when Diagnosers/Diagnostis are wired up properly, instead of the Type name,
        // print the name used on the cmd line, i.e. -d=<NAME>
        logger.WriteLineHeader($"// * Diagnostic Output - {currentDiagnoser.GetType().Name} *");
        currentDiagnoser.DisplayResults(logger);
        logger.NewLine();
    }
}
// Prints columns whose value is shared by all benchmarks (hidden, non-trivial
// columns) as "Name=Value" pairs, three per line.
public static void PrintCommonColumns(this SummaryTable table, ILogger logger)
{
    var hidden = table.Columns.Where(c => !c.NeedToShow && !c.IsTrivial).ToArray();
    if (!hidden.Any())
        return;

    for (int i = 0; i < hidden.Length; i++)
    {
        logger.WriteInfo($"{hidden[i].Header}={hidden[i].Content[0]} ");
        // Break the line after every third pair.
        if ((i + 1) % 3 == 0)
            logger.NewLine();
    }

    // Terminate a partially filled last line.
    if (hidden.Length % 3 != 0)
        logger.NewLine();
}
// Prints columns whose value is identical for every benchmark (hidden,
// non-trivial columns) as "Name=Value" pairs, wrapping after every third pair.
public static void PrintCommonColumns(this SummaryTable table, ILogger logger)
{
    var alwaysSameColumns = table.Columns.Where(c => !c.NeedToShow && !c.IsTrivial).ToArray();
    if (alwaysSameColumns.Length == 0)
    {
        return;
    }

    const int pairsPerLine = 3;
    var printedOnLine = 0;
    foreach (var column in alwaysSameColumns)
    {
        logger.WriteInfo($"{column.Header}={column.Content[0]} ");
        if (++printedOnLine == pairsPerLine)
        {
            logger.NewLine();
            printedOnLine = 0;
        }
    }

    // Close an unfinished line, if any.
    if (printedOnLine > 0)
    {
        logger.NewLine();
    }
}
// For every report: dump the raw measurements, then per-IterationMode statistics.
public override void ExportToLog(Summary summary, ILogger logger)
{
    foreach (var report in summary.Reports.Values)
    {
        var measurements = report.AllMeasurements;

        logger.WriteLineHeader($"*** {report.Benchmark.ShortInfo} ***");
        logger.WriteLineHeader("* Raw *");
        foreach (var measurement in measurements)
        {
            logger.WriteLineResult(measurement.ToStr());
        }

        // One statistics section per distinct iteration mode.
        foreach (var mode in measurements.Select(m => m.IterationMode).Distinct())
        {
            logger.NewLine();
            logger.WriteLineHeader($"* Statistics for {mode}");
            logger.WriteLineStatistic(measurements.Where(m => m.IterationMode == mode).GetStatistics().ToTimeStr());
        }
    }
}
// Builds the generated benchmark project and logs the outcome
// (including the exception message on failure).
private static BuildResult Build(ILogger logger, IToolchain toolchain, GenerateResult generateResult, Benchmark benchmark)
{
    logger.WriteLineInfo("// *** Build ***");
    var result = toolchain.Builder.Build(generateResult, logger, benchmark);

    if (!result.IsBuildSuccess)
    {
        logger.WriteLineError("// Result = Failure");
        if (result.BuildException != null)
        {
            logger.WriteLineError($"// Exception: {result.BuildException.Message}");
        }
    }
    else
    {
        logger.WriteLineInfo("// Result = Success");
    }

    logger.NewLine();
    return result;
}
// For each benchmark report, writes the raw measurement lines and then a
// statistics block for every iteration mode that occurred.
public override void ExportToLog(Summary summary, ILogger logger)
{
    foreach (var report in summary.Reports.Values)
    {
        var allRuns = report.AllMeasurements;
        var iterationModes = allRuns.Select(run => run.IterationMode).Distinct().ToList();

        logger.WriteLineHeader($"*** {report.Benchmark.ShortInfo} ***");
        logger.WriteLineHeader("* Raw *");
        foreach (var run in allRuns)
        {
            logger.WriteLineResult(run.ToStr());
        }

        foreach (var iterationMode in iterationModes)
        {
            logger.NewLine();
            logger.WriteLineHeader($"* Statistics for {iterationMode}");
            var stats = allRuns.Where(run => run.IterationMode == iterationMode).GetStatistics();
            logger.WriteLineStatistic(stats.ToTimeStr());
        }
    }
}
// Generates the benchmark project and logs success (with output directory)
// or failure (with the exception message, when one is available).
private static GenerateResult Generate(ILogger logger, IToolchain toolchain, Benchmark benchmark)
{
    logger.WriteLineInfo("// *** Generate *** ");
    var generation = toolchain.Generator.GenerateProject(benchmark, logger);

    if (generation.IsGenerateSuccess)
    {
        logger.WriteLineInfo("// Result = Success");
        // nameof(...) yields the member name "DirectoryPath" regardless of the local's name.
        logger.WriteLineInfo($"// {nameof(generation.DirectoryPath)} = {generation.DirectoryPath}");
    }
    else
    {
        logger.WriteLineError("// Result = Failure");
        if (generation.GenerateException != null)
        {
            logger.WriteLineError($"// Exception: {generation.GenerateException.Message}");
        }
    }

    logger.NewLine();
    return generation;
}
// Orchestrates a full benchmark session: runs every benchmark, builds the
// Summary, exports it, prints detailed results, diagnoser output, the markdown
// summary and any analyser warnings. Returns the aggregate Summary.
private static Summary Run(IList <Benchmark> benchmarks, ILogger logger, string title, IConfig config)
{
    var currentDirectory = Directory.GetCurrentDirectory();
    logger.WriteLineHeader("// ***** BenchmarkRunner: Start *****");
    logger.WriteLineInfo("// Found benchmarks:");
    foreach (var benchmark in benchmarks)
    {
        logger.WriteLineInfo($"// {benchmark.ShortInfo}");
    }
    logger.NewLine();

    // Run each benchmark, timing the whole batch with one chronometer.
    var globalChronometer = Chronometer.Start();
    var reports = new List <BenchmarkReport>();
    foreach (var benchmark in benchmarks)
    {
        var report = Run(benchmark, logger, config);
        reports.Add(report);
        if (report.GetResultRuns().Any())
        {
            logger.WriteLineStatistic(report.GetResultRuns().GetStatistics().ToTimeStr());
        }
        logger.NewLine();
    }
    var clockSpan = globalChronometer.Stop();

    var summary = new Summary(title, reports, EnvironmentHelper.GetCurrentInfo(), config, currentDirectory, clockSpan.GetTimeSpan());

    logger.WriteLineHeader("// ***** BenchmarkRunner: Finish *****");
    logger.NewLine();

    // Export via all configured exporters; print each produced file path,
    // relative to the current directory when it lies beneath it.
    logger.WriteLineHeader("// * Export *");
    var files = config.GetCompositeExporter().ExportToFiles(summary);
    foreach (var file in files)
    {
        var printedFile = file.StartsWith(currentDirectory) ? file.Substring(currentDirectory.Length).Trim('/', '\\') : file;
        logger.WriteLineInfo($"  {printedFile}");
    }
    logger.NewLine();

    logger.WriteLineHeader("// * Detailed results *");
    // TODO: make exporter
    foreach (var report in reports)
    {
        logger.WriteLineInfo(report.Benchmark.ShortInfo);
        logger.WriteLineStatistic(report.GetResultRuns().GetStatistics().ToTimeStr());
        logger.NewLine();
    }

    LogTotalTime(logger, clockSpan.GetTimeSpan());
    logger.NewLine();

    // Diagnoser output, only when diagnosers are configured.
    if (config.GetDiagnosers().Count() > 0)
    {
        logger.NewLine();
        config.GetCompositeDiagnoser().DisplayResults(logger);
    }

    logger.WriteLineHeader("// * Summary *");
    MarkdownExporter.Default.ExportToLog(summary, logger);

    // Analyser warnings, if any.
    // TODO: make exporter
    var warnings = config.GetCompositeAnalyser().Analyze(summary).ToList();
    if (warnings.Count > 0)
    {
        logger.NewLine();
        logger.WriteLineError("// * Warnings * ");
        foreach (var warning in warnings)
        {
            logger.WriteLineError($"{warning.Message}");
        }
    }

    logger.NewLine();
    logger.WriteLineHeader("// ***** BenchmarkRunner: End *****");
    return(summary);
}