public BenchmarkExecResult Execute(BenchmarkBuildResult buildResult, BenchmarkParameters parameters, IBenchmarkDiagnoser diagnoser)
{
    var exeName = Path.Combine(buildResult.DirectoryPath, "Program.exe");
    var args = parameters == null ? string.Empty : parameters.ToArgs();

    if (File.Exists(exeName))
    {
        var lines = new List<string>();
        var startInfo = CreateStartInfo(exeName, args);
        using (var process = Process.Start(startInfo))
        {
            if (process != null)
            {
                process.PriorityClass = ProcessPriorityClass.High;
                process.ProcessorAffinity = new IntPtr(2);

                string line;
                while ((line = process.StandardOutput.ReadLine()) != null)
                {
                    logger?.WriteLine(line);
                    if (!line.StartsWith("//") && !string.IsNullOrEmpty(line))
                    {
                        lines.Add(line);
                    }

                    // Wait until we know "Warmup" is happening, then disassemble the process
                    if (codeAlreadyExtracted == false && line.StartsWith("// Warmup") && !line.StartsWith("// Warmup (idle)"))
                    {
                        try
                        {
                            diagnoser.Print(benchmark, process, logger);
                        }
                        finally
                        {
                            // Always set this, even if something went wrong; otherwise we would retry on every run of a benchmark batch
                            codeAlreadyExtracted = true;
                        }
                    }
                }

                if (process.HasExited && process.ExitCode != 0)
                {
                    if (logger != null)
                    {
                        logger.WriteError(
                            $"Something bad happened during the execution of {exeName}. Try to run the benchmark again using an AnyCPU application\n");
                    }
                    else if (exeName.ToLowerInvariant() == "msbuild")
                    {
                        Console.WriteLine("Build failed");
                    }

                    return new BenchmarkExecResult(true, new string[0]);
                }
            }
        }

        return new BenchmarkExecResult(true, lines);
    }

    return new BenchmarkExecResult(false, new string[0]);
}
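// Note: the Execute method above depends on a CreateStartInfo helper that is not shown in this
// listing. Because the loop reads process.StandardOutput line by line, the start info must
// redirect standard output and disable shell execution. The following is only a minimal sketch
// under those assumptions, not the original implementation:
private ProcessStartInfo CreateStartInfo(string exeName, string args)
{
    return new ProcessStartInfo
    {
        FileName = exeName,
        Arguments = args,
        UseShellExecute = false,        // required so standard output can be redirected
        RedirectStandardOutput = true,  // Execute reads StandardOutput.ReadLine()
        CreateNoWindow = true           // assumption: benchmark processes run headless
    };
}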
public void CreateReport(BenchmarkParameters bp, List<ScenarioResult> resultList, Stopwatch timer)
{
    var resultFilepath = Path.Combine(bp.DestinationFolder, Path.GetFileName(bp.MapFilepath) + $"{DateTime.Now:yyyyMMddHHmmss}" + ".result");
    using var streamWriter = new StreamWriter(new FileStream(resultFilepath, FileMode.Create));

    if (!resultList.Any())
    {
        streamWriter.WriteLine($"{bp}");
        streamWriter.WriteLine($"Did not find any scenarios in: {bp.ScenarioFilepath}");
        streamWriter.Flush();
        streamWriter.Close();
        return;
    }

    var actualLength = resultList.Select(res => res.PathLength).Sum();
    var expectedLength = resultList.Select(res => res.CorrectPathLength).Sum();

    streamWriter.WriteLine($"{bp}");
    streamWriter.WriteLine($"The overall % increase of the result path length is: {actualLength.PercentageDifference(expectedLength)}%");
    streamWriter.WriteLine($"Runtime: {timer.Elapsed}");

    if (bp.WriteResultIntoReport)
    {
        streamWriter.WriteLine();
        foreach (var result in resultList)
        {
            streamWriter.WriteLine($"{result}");
            streamWriter.WriteLine();
        }
    }

    streamWriter.Flush();
    streamWriter.Close();
}
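// Note: CreateReport calls a PercentageDifference extension that is not part of this listing.
// The sketch below is a plausible, hypothetical implementation only, assuming the path lengths
// are doubles and that a positive result means the computed paths are longer than the reference:
public static class PathLengthExtensions
{
    public static double PercentageDifference(this double actual, double expected)
    {
        if (expected == 0)
        {
            return 0;
        }

        return (actual - expected) / expected * 100;
    }
}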
private static void Compare(BenchmarkParameters parameters)
{
    Console.WriteLine("=== Start and Compare ===");
    var actions = new Action[] { ActionToTest1 };
    var comparisons = Benchmarker.StartAndCompare(ActionToTest, parameters, actions);

    Console.WriteLine("Method comparisons executed against ActionToTest method:\n");
    Console.WriteLine("Method\tTotal ms\tTotal ticks\tAvg. ms\tAvg. ticks\tGC0\tGC1\tGC2\t");

    for (int i = 0; i < comparisons.Length; i++)
    {
        var comparison = comparisons[i];
        var line = actions[i].Method.Name + "\t";
        line += comparison.ElapsedMillisecondsDifferencePercentage + "% (" + comparison.ElapsedMillisecondsDifference + " ms)\t";
        line += comparison.ElapsedTicksDifferencePercentage + "% (" + comparison.ElapsedTicksDifference + " ticks)\t";
        line += comparison.AverageMillisecondsDifferencePercentage + "% (" + comparison.AverageMillisecondsDifference + " ms)\t";
        line += comparison.AverageTicksDifferencePercentage + "% (" + comparison.AverageTicksDifference + " ticks)\t";
        line += comparison.GCCollections0DifferencePercentage + "% (" + comparison.GCCollections0Difference + ")\t";
        line += comparison.GCCollections1DifferencePercentage + "% (" + comparison.GCCollections1Difference + ")\t";
        line += comparison.GCCollections2DifferencePercentage + "% (" + comparison.GCCollections2Difference + ")\t";
        Console.WriteLine(line);
    }
}
public IEnumerable<IParam> GetParameters()
{
    var param = BenchmarkParameters.Select(p => new JemBenchmarkParam<TParam>(p));
    return param;
}
private static void StartSingle(BenchmarkParameters parameters)
{
    Console.WriteLine("=== Start Single ===");
    var result = Benchmarker.Start(ActionToTest, parameters);
    Console.WriteLine(result.ToString());
}
public void BenchmarkGridScenarioSetUp()
{
    BP = new BenchmarkParameters
    {
        MapFilepath = Path.Combine(GetFolderPath(SpecialFolder.MyDocuments), "PathfindingData", "BaldursGate", "Maps", MapName),
        ScenarioFilepath = Path.Combine(GetFolderPath(SpecialFolder.MyDocuments), "PathfindingData", "BaldursGate", "Scenarios", MapName + ".scen"),
        AmountOfScenarios = 50,
        WriteResultIntoReport = true
    };
}
private static void StartScope(BenchmarkParameters parameters)
{
    Console.WriteLine("=== Start Scope ===");
    var iterations = parameters.BenchmarkIterations;
    var result = new BenchmarkResult();

    using (var benchmark = Benchmarker.StartScope(result))
    {
        for (long i = 0; i < iterations; i++)
        {
            ActionToTest();
        }
    }

    Console.WriteLine(result.ToString());
}
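// Note: Benchmarker.StartScope returns a disposable scope that fills the supplied BenchmarkResult
// when the using block ends. The library's implementation is not shown in this listing; the
// self-contained sketch below (requires System and System.Diagnostics) only illustrates that
// pattern, and every type and member name in it is hypothetical:
public sealed class ScopeResult
{
    public long ElapsedMilliseconds { get; set; }
    public long ElapsedTicks { get; set; }
}

public sealed class MeasurementScope : IDisposable
{
    private readonly Stopwatch stopwatch = Stopwatch.StartNew();
    private readonly ScopeResult result;

    public MeasurementScope(ScopeResult result)
    {
        this.result = result;
    }

    public void Dispose()
    {
        // Record the measurements when the using block ends, mirroring how StartScope
        // populates BenchmarkResult in the snippet above.
        stopwatch.Stop();
        result.ElapsedMilliseconds = stopwatch.ElapsedMilliseconds;
        result.ElapsedTicks = stopwatch.ElapsedTicks;
    }
}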
private static void StartMultiple(BenchmarkParameters parameters)
{
    Console.WriteLine("=== Start Multiple ===");
    var actions = new Action[] { ActionToTest, ActionToTest1 };
    var results = Benchmarker.StartMultiple(actions, parameters);

    for (int i = 0; i < actions.Length; i++)
    {
        var result = results[i];
        Console.WriteLine(result.ToString());
    }
}
public void BenchmarkRunner_WithAStarDiagonalDistance_ShouldRunAllScenariosCorrectly()
{
    // Arrange
    var bp = new BenchmarkParameters
    {
        MapFilepath = Path.Combine(GetFolderPath(SpecialFolder.MyDocuments), "PathfindingData", "BaldursGate", "Maps", MapName),
        ScenarioFilepath = Path.Combine(GetFolderPath(SpecialFolder.MyDocuments), "PathfindingData", "BaldursGate", "Scenarios", MapName + ".scen"),
        AmountOfScenarios = 300,
        WriteResultIntoReport = true,
        Heuristic = new DiagonalDistanceHeuristic(),
        Reporter = new FileReporter()
    };
    bp.Algorithm = new AStar(bp.Heuristic);

    // Act
    var testResult = new BenchmarkRunner().RunBenchmark(bp);

    // Assert
    Assert.True(testResult.All(res => res.Success));
}
private BenchmarkReport Run(IBenchmarkLogger logger, Benchmark benchmark, IList<string> importantPropertyNames, BenchmarkParameters parameters = null)
{
    var toolchain = Plugins.CreateToolchain(benchmark, logger);

    logger.WriteLineHeader("// **************************");
    logger.WriteLineHeader("// Benchmark: " + benchmark.Description);

    var generateResult = Generate(logger, toolchain);
    if (!generateResult.IsGenerateSuccess)
    {
        return BenchmarkReport.CreateEmpty(benchmark, parameters);
    }

    var buildResult = Build(logger, toolchain, generateResult);
    if (!buildResult.IsBuildSuccess)
    {
        return BenchmarkReport.CreateEmpty(benchmark, parameters);
    }

    var runReports = Execute(logger, benchmark, importantPropertyNames, parameters, toolchain, buildResult);
    return new BenchmarkReport(benchmark, runReports, parameters);
}
public PureStringBuilder(BenchmarkParameters parameters) : base(parameters) { }
public TemplateBasedArrayStringBuilder(BenchmarkParameters parameters) : base(parameters) { }
public IEnumerable<IParam> GetParameters() => BenchmarkParameters.Select(p => new JemBenchmarkParam<TParam>(p));
public ContenderBase(BenchmarkParameters parameters) { this.Parameters = parameters; }
public static BenchmarkReport CreateEmpty(Benchmark benchmark, BenchmarkParameters parameters) => new BenchmarkReport(benchmark, new BenchmarkRunReport[0], parameters);
public TemplateBasedEnumStringbuilder(BenchmarkParameters parameters) : base(parameters) { }
public long Run(BenchmarkParameters n) { throw new NotImplementedException(); }
public void CreateReport(BenchmarkParameters bp, List<ScenarioResult> sortedList, Stopwatch timer) { }
public LargeContentCreationContenders(BenchmarkParameters parameters)
{
    Candidates.Add(new PureStringBuilder(parameters));
    Candidates.Add(new TemplateBasedStringBuilder(parameters));
    Candidates.Add(new TemplateBasedArrayStringBuilder(parameters));
}
public BenchmarkExecResult Execute(BenchmarkBuildResult buildResult, BenchmarkParameters parameters, IBenchmarkDiagnoser diagnoser)
{
    Done = true;
    return new BenchmarkExecResult(true, new string[0]);
}
private List<BenchmarkRunReport> Execute(IBenchmarkLogger logger, Benchmark benchmark, IList<string> importantPropertyNames, BenchmarkParameters parameters, IBenchmarkToolchainFacade toolchain, BenchmarkBuildResult buildResult)
{
    logger.WriteLineInfo("// *** Execute ***");
    var processCount = Math.Max(1, benchmark.Task.ProcessCount);
    var runReports = new List<BenchmarkRunReport>();

    for (int processNumber = 0; processNumber < processCount; processNumber++)
    {
        logger.WriteLineInfo($"// Run, Process: {processNumber + 1} / {processCount}");
        if (parameters != null)
        {
            logger.WriteLineInfo($"// {parameters.ToInfo()}");
        }

        if (importantPropertyNames.Any())
        {
            logger.WriteInfo("// ");
            foreach (var name in importantPropertyNames)
            {
                logger.WriteInfo($"{name}={benchmark.Properties.GetValue(name)} ");
            }
            logger.NewLine();
        }

        var execResult = toolchain.Execute(buildResult, parameters, Plugins.CompositeDiagnoser);
        if (execResult.FoundExecutable)
        {
            var iterRunReports = execResult.Data.Select(line => BenchmarkRunReport.Parse(logger, line)).Where(r => r != null).ToList();
            runReports.AddRange(iterRunReports);
        }
        else
        {
            logger.WriteLineError("Executable not found");
        }
    }

    logger.NewLine();
    return runReports;
}
public BenchmarkExecResult Execute(BenchmarkBuildResult buildResult, BenchmarkParameters parameters, IBenchmarkDiagnoser diagnoser)
{
    return executor.Execute(buildResult, parameters, diagnoser);
}
public BenchmarkReport(Benchmark benchmark, IList<BenchmarkRunReport> runs, BenchmarkParameters parameters = null)
{
    Benchmark = benchmark;
    Runs = runs;
    Parameters = parameters;
}