public override void ExportToLog(Summary summary, ILogger logger)
        {
            logger.WriteLine("....");
            foreach (var infoLine in HostEnvironmentInfo.GetCurrent().ToFormattedString())
            {
                logger.WriteLineInfo(infoLine);
            }
            logger.WriteLineInfo(summary.JobRuntimes);
            logger.WriteLine();

            PrintTable(summary.Table, logger);

            var benchmarksWithTroubles = summary.Reports
                .Where(r => !r.GetResultRuns().Any())
                .Select(r => r.Benchmark)
                .ToList();

            if (benchmarksWithTroubles.Count > 0)
            {
                logger.WriteLine();
                logger.WriteLine("[WARNING]");
                logger.WriteLineError(".Benchmarks with issues");
                logger.WriteLine("====");
                foreach (var benchmarkWithTroubles in benchmarksWithTroubles)
                    logger.WriteLineError("* " + benchmarkWithTroubles.DisplayInfo);
                logger.WriteLine("====");
            }
        }
        public virtual bool IsSupported(BenchmarkCase benchmarkCase, ILogger logger, IResolver resolver)
        {
            var runtime = benchmarkCase.Job.ResolveValue(EnvironmentMode.RuntimeCharacteristic, resolver);
            var jit     = benchmarkCase.Job.ResolveValue(EnvironmentMode.JitCharacteristic, resolver);

            if (!(runtime is MonoRuntime) && jit == Jit.Llvm)
            {
                logger.WriteLineError($"Llvm is supported only for Mono, benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                return(false);
            }

            if (runtime is MonoRuntime mono && !benchmarkCase.GetToolchain().IsInProcess)
            {
                if (string.IsNullOrEmpty(mono.CustomPath) && !HostEnvironmentInfo.GetCurrent().IsMonoInstalled.Value)
                {
                    logger.WriteLineError($"Mono is not installed or added to PATH, benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                    return(false);
                }

                if (!string.IsNullOrEmpty(mono.CustomPath) && !File.Exists(mono.CustomPath))
                {
                    logger.WriteLineError($"We could not find Mono in provided path ({mono.CustomPath}), benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                    return(false);
                }
            }

            return(true);
        }
Example #3
        public override void ExportToLog(Summary summary, ILogger logger)
        {
            if (UseCodeBlocks)
            {
                logger.WriteLine(CodeBlockStart);
            }

            logger = GetRightLogger(logger);
            logger.WriteLine();
            foreach (string infoLine in summary.HostEnvironmentInfo.ToFormattedString())
            {
                logger.WriteLineInfo(infoLine);
            }

            logger.WriteLineInfo(summary.AllRuntimes);
            logger.WriteLine();

            PrintTable(summary.Table, logger);

            // TODO: move this logic to an analyser
            var benchmarksWithTroubles = summary.Reports.Where(r => !r.GetResultRuns().Any()).Select(r => r.BenchmarkCase).ToList();

            if (benchmarksWithTroubles.Count > 0)
            {
                logger.WriteLine();
                logger.WriteLineError("Benchmarks with issues:");
                foreach (var benchmarkWithTroubles in benchmarksWithTroubles)
                {
                    logger.WriteLineError("  " + benchmarkWithTroubles.DisplayInfo);
                }
            }
        }
Example #4
        public override bool IsSupported(Benchmark benchmark, ILogger logger, IResolver resolver)
        {
            if (!base.IsSupported(benchmark, logger, resolver))
            {
                return(false);
            }

            if (!RuntimeInformation.IsWindows())
            {
                logger.WriteLineError($"Classic .NET toolchain is supported only for Windows, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

            if (!HostEnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli toolchain to be installed, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

            if (benchmark.Job.HasValue(EnvMode.JitCharacteristic) && benchmark.Job.Env.Jit == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

#if NETCOREAPP1_1
            if (benchmark.Job.HasValue(InfrastructureMode.EnvironmentVariablesCharacteristic))
            {
                logger.WriteLineError($"ProcessStartInfo.EnvironmentVariables is avaialable for .NET Core 2.0, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }
#endif

            return(true);
        }
Example #5
        public override bool IsSupported(BenchmarkCase benchmarkCase, ILogger logger, IResolver resolver)
        {
            if (!base.IsSupported(benchmarkCase, logger, resolver))
            {
                return(false);
            }

            if (InvalidCliPath(CustomDotNetCliPath, benchmarkCase, logger))
            {
                return(false);
            }

            if (benchmarkCase.Job.HasValue(EnvironmentMode.JitCharacteristic) && benchmarkCase.Job.ResolveValue(EnvironmentMode.JitCharacteristic, resolver) == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                return(false);
            }
            if (benchmarkCase.Job.ResolveValue(GcMode.CpuGroupsCharacteristic, resolver))
            {
                logger.WriteLineError($"Currently project.json does not support CpuGroups (app.config does), benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                return(false);
            }
            if (benchmarkCase.Job.ResolveValue(GcMode.AllowVeryLargeObjectsCharacteristic, resolver))
            {
                logger.WriteLineError($"Currently project.json does not support gcAllowVeryLargeObjects (app.config does), benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                return(false);
            }

            return(true);
        }
Example #6
        private static bool TryFindRScript(ILogger consoleLogger, out string rscriptPath)
        {
            string rscriptExecutable = RuntimeInformation.IsWindows() ? "Rscript.exe" : "Rscript";

            rscriptPath = null;
            string rHome = Environment.GetEnvironmentVariable("R_HOME");

            if (rHome != null)
            {
                rscriptPath = Path.Combine(rHome, "bin", rscriptExecutable);
                if (File.Exists(rscriptPath))
                {
                    return(true);
                }

                consoleLogger.WriteLineError($"RPlotExporter requires R_HOME to point to the parent directory of the existing '{Path.DirectorySeparatorChar}bin{Path.DirectorySeparatorChar}{rscriptExecutable} (currently points to {rHome})");
            }

            // No R_HOME, or R_HOME points to a wrong folder, try the path
            rscriptPath = FindInPath(rscriptExecutable);
            if (rscriptPath == null)
            {
                consoleLogger.WriteLineError($"RPlotExporter couldn't find {rscriptExecutable} in your PATH and no R_HOME environment variable is defined");
                return(false);
            }

            return(true);
        }
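
A hypothetical call site for the helper above, assuming BenchmarkDotNet's ConsoleLogger.Default as the console logger (an illustrative sketch, not part of the original example):

        // Resolve Rscript via R_HOME or PATH and report where it was found.
        if (TryFindRScript(ConsoleLogger.Default, out string rscriptPath))
        {
            Console.WriteLine($"Rscript resolved to: {rscriptPath}");
        }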
Example #7
        public override bool IsSupported(BenchmarkCase benchmarkCase, ILogger logger, IResolver resolver)
        {
            if (!base.IsSupported(benchmarkCase, logger, resolver))
            {
                return(false);
            }

            if (!benchmarkCase.Job.Environment.HasValue(EnvironmentMode.RuntimeCharacteristic) || !(benchmarkCase.Job.Environment.Runtime is MonoRuntime))
            {
                logger.WriteLineError("The MonoAOT toolchain requires the Runtime property to be configured explicitly to an instance of MonoRuntime class");
                return(false);
            }

            if ((benchmarkCase.Job.Environment.Runtime is MonoRuntime monoRuntime) && !string.IsNullOrEmpty(monoRuntime.MonoBclPath) && !Directory.Exists(monoRuntime.MonoBclPath))
            {
                logger.WriteLineError($"The MonoBclPath provided for MonoAOT toolchain: {monoRuntime.MonoBclPath} does NOT exist.");
                return(false);
            }

            if (benchmarkCase.Job.HasValue(InfrastructureMode.BuildConfigurationCharacteristic) &&
                benchmarkCase.Job.ResolveValue(InfrastructureMode.BuildConfigurationCharacteristic, resolver) != InfrastructureMode.ReleaseConfigurationName)
            {
                logger.WriteLineError("The MonoAOT toolchain does not allow to rebuild source project, so defining custom build configuration makes no sense");
                return(false);
            }

            if (benchmarkCase.Job.HasValue(InfrastructureMode.NuGetReferencesCharacteristic))
            {
                logger.WriteLineError("The MonoAOT toolchain does not allow specifying NuGet package dependencies");
                return(false);
            }

            return(true);
        }
Example #8
        public override bool IsSupported(BenchmarkCase benchmarkCase, ILogger logger, IResolver resolver)
        {
            if (!base.IsSupported(benchmarkCase, logger, resolver))
            {
                return(false);
            }

            if (benchmarkCase.Job.ResolveValue(GcMode.RetainVmCharacteristic, resolver))
            {
                logger.WriteLineError($"Currently App.config does not support RetainVM option, benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                return(false);
            }

            if (benchmarkCase.Job.HasValue(InfrastructureMode.BuildConfigurationCharacteristic) &&
                benchmarkCase.Job.ResolveValue(InfrastructureMode.BuildConfigurationCharacteristic, resolver) != InfrastructureMode.ReleaseConfigurationName)
            {
                logger.WriteLineError("The Roslyn toolchain does not allow to rebuild source project, so defining custom build configuration makes no sense");
                return(false);
            }

            if (benchmarkCase.Job.HasValue(InfrastructureMode.NugetReferencesCharacteristic))
            {
                logger.WriteLineError("The Roslyn toolchain does not allow specifying Nuget package dependencies");
                return(false);
            }

            return(true);
        }
        public ExecuteResult Execute(BuildResult buildResult, Benchmark benchmark, ILogger logger, IResolver resolver, IDiagnoser diagnoser = null)
        {
            var executableName = $"{buildResult.ArtifactsPaths.ProgramName}.dll";

            if (!File.Exists(Path.Combine(buildResult.ArtifactsPaths.BinariesDirectoryPath, executableName)))
            {
                logger.WriteLineError($"Did not find {executableName} in {buildResult.ArtifactsPaths.BinariesDirectoryPath}, but the folder contained:");
                foreach (var file in new DirectoryInfo(buildResult.ArtifactsPaths.BinariesDirectoryPath).GetFiles("*.*"))
                {
                    logger.WriteLineError(file.Name);
                }

                return(new ExecuteResult(false, -1, Array.Empty <string>(), Array.Empty <string>()));
            }

            ConsoleHandler.EnsureInitialized(logger);

            try
            {
                return(Execute(benchmark, logger, buildResult.ArtifactsPaths, diagnoser, executableName));
            }
            finally
            {
                ConsoleHandler.Instance.ClearProcess();
            }
        }
Example #10
        private void VideoStreamWnd_Shown(object sender, EventArgs e)
        {
            try
            {
                project = ((MainForm)this.MdiParent).Project;
                VideoDataLine startDataLine = (VideoDataLine)dataStream.DataLines[0];
                OpenVideo(startDataLine);
                trackBar.Minimum = 0;
                trackBar.Maximum = Convert.ToInt32(dataStream.Length * reader.FrameRate);

                SetFrameByTime(dataStream.StartTime);
                this.Text = "Video Stream: " + dataStream.ShortName;
                Refresh();
            }
            catch (Exception ex)
            {
                reader = null;
                logger.WriteLineError(ex.ToString());
                MessageBox.Show("Error during opening the file: " + ex.Message, "Error during opening video file", MessageBoxButtons.OK, MessageBoxIcon.Error);
                DialogResult result = MessageBox.Show("Do you want to reload stream for appropriate video format?", "Reloading stream!", MessageBoxButtons.OKCancel, MessageBoxIcon.Exclamation);
                if (result == DialogResult.OK)
                {
                    dataStream.WriteMetadata(logger);
                    MessageBox.Show("Stream reloaded! Open stream again!", "Open stream again!", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                this.Close();
            }
        }
Example #11
        public override bool IsSupported(Benchmark benchmark, ILogger logger, IResolver resolver)
        {
            if (!base.IsSupported(benchmark, logger, resolver))
            {
                return(false);
            }

            if (!RuntimeInformation.IsWindows())
            {
                logger.WriteLineError($"Classic .NET toolchain is supported only for Windows, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

            if (!HostEnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli toolchain to be installed, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

            if (benchmark.Job.ResolveValue(EnvMode.JitCharacteristic, resolver) == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

            return(true);
        }
Example #12
        public override void ExportToLog(Summary summary, ILogger logger)
        {
            logger.WriteLine("....");
            foreach (string infoLine in summary.HostEnvironmentInfo.ToFormattedString())
            {
                logger.WriteLineInfo(infoLine);
            }
            logger.WriteLineInfo(summary.AllRuntimes);
            logger.WriteLine();

            PrintTable(summary.Table, logger);

            var benchmarksWithTroubles = summary.Reports
                                         .Where(r => !r.GetResultRuns().Any())
                                         .Select(r => r.BenchmarkCase)
                                         .ToList();

            if (benchmarksWithTroubles.Count > 0)
            {
                logger.WriteLine();
                logger.WriteLine("[WARNING]");
                logger.WriteLineError(".Benchmarks with issues");
                logger.WriteLine("====");
                foreach (var benchmarkWithTroubles in benchmarksWithTroubles)
                {
                    logger.WriteLineError("* " + benchmarkWithTroubles.DisplayInfo);
                }
                logger.WriteLine("====");
            }
        }
        public void ExportToLog(Summary summary, ILogger logger)
        {
            if (useCodeBlocks)
            {
                logger.WriteLine($"```{codeBlocksSyntax}");
            }
            logger = GetRightLogger(logger);
            logger.WriteLine();
            foreach (var infoLine in HostEnvironmentInfo.GetCurrent().ToFormattedString())
            {
                logger.WriteLineInfo(infoLine);
            }
            logger.WriteLine();

            PrintTable(summary.Table, logger);

            // TODO: move this logic to an analyser
            var benchmarksWithTroubles = summary.Reports.Where(r => !r.GetResultRuns().Any()).Select(r => r.Benchmark).ToList();

            if (benchmarksWithTroubles.Count > 0)
            {
                logger.WriteLine();
                logger.WriteLineError("Benchmarks with issues:");
                foreach (var benchmarkWithTroubles in benchmarksWithTroubles)
                {
                    logger.WriteLineError("  " + benchmarkWithTroubles.ShortInfo);
                }
            }
        }
Example #14
        public override bool IsSupported(Benchmark benchmark, ILogger logger, IResolver resolver)
        {
            if (!base.IsSupported(benchmark, logger, resolver))
            {
                return(false);
            }

            if (!HostEnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli toolchain to be installed, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

            if (benchmark.Job.ResolveValue(EnvMode.JitCharacteristic, resolver) == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }
            if (benchmark.Job.ResolveValue(GcMode.CpuGroupsCharacteristic, resolver))
            {
                logger.WriteLineError($"Currently project.json does not support CpuGroups (app.config does), benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }
            if (benchmark.Job.ResolveValue(GcMode.AllowVeryLargeObjectsCharacteristic, resolver))
            {
                logger.WriteLineError($"Currently project.json does not support gcAllowVeryLargeObjects (app.config does), benchmark '{benchmark.DisplayInfo}' will not be executed");
                return(false);
            }

            return(true);
        }
        public IEnumerable <string> ExportToFiles(Summary summary, ILogger consoleLogger)
        {
            const string scriptFileName = "BuildPlots.R";

            yield return(scriptFileName);

            string fileNamePrefix = Path.Combine(summary.ResultsDirectoryPath, summary.Title);
            string scriptFullPath = Path.Combine(summary.ResultsDirectoryPath, scriptFileName);
            string script         = ResourceHelper.
                                    LoadTemplate(scriptFileName).
                                    Replace("$BenchmarkDotNetVersion$", BenchmarkDotNetInfo.FullTitle).
                                    Replace("$CsvSeparator$", CsvMeasurementsExporter.Default.Separator);

            lock (buildScriptLock)
                File.WriteAllText(scriptFullPath, script);

            string rscriptExecutable = RuntimeInformation.IsWindows() ? "Rscript.exe" : "Rscript";
            string rscriptPath;
            string rHome = Environment.GetEnvironmentVariable("R_HOME");

            if (rHome != null)
            {
                rscriptPath = Path.Combine(rHome, "bin", rscriptExecutable);
                if (!File.Exists(rscriptPath))
                {
                    consoleLogger.WriteLineError($"RPlotExporter requires R_HOME to point to the directory containing bin{Path.DirectorySeparatorChar}{rscriptExecutable} (currently points to {rHome})");
                    yield break;
                }
            }
            else // No R_HOME, try the path
            {
                rscriptPath = FindInPath(rscriptExecutable);
                if (rscriptPath == null)
                {
                    consoleLogger.WriteLineError($"RPlotExporter couldn't find {rscriptExecutable} in your PATH and no R_HOME environment variable is defined");
                    yield break;
                }
            }

            var start = new ProcessStartInfo
            {
                UseShellExecute        = false,
                RedirectStandardOutput = false,
                CreateNoWindow         = true,
                FileName         = rscriptPath,
                WorkingDirectory = summary.ResultsDirectoryPath,
                Arguments        = $"\"{scriptFullPath}\" \"{fileNamePrefix}-measurements.csv\""
            };

            using (var process = Process.Start(start))
                process?.WaitForExit();
            yield return(fileNamePrefix + "-boxplot.png");

            yield return(fileNamePrefix + "-barplot.png");
        }
Example #16
        private static List <ExecuteResult> Execute(ILogger logger, Benchmark benchmark, IToolchain toolchain, BuildResult buildResult, IConfig config)
        {
            var executeResults = new List <ExecuteResult>();

            logger.WriteLineInfo("// *** Execute ***");
            var launchCount = Math.Max(1, benchmark.Job.LaunchCount.IsAuto ? 2 : benchmark.Job.LaunchCount.Value);

            for (int processNumber = 0; processNumber < launchCount; processNumber++)
            {
                var printedProcessNumber = (benchmark.Job.LaunchCount.IsAuto && processNumber < 2) ? "" : " / " + launchCount.ToString();
                logger.WriteLineInfo($"// Launch: {processNumber + 1}{printedProcessNumber}");

                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger);

                if (!executeResult.FoundExecutable)
                {
                    logger.WriteLineError("Executable not found");
                }
                executeResults.Add(executeResult);

                if (benchmark.Job.LaunchCount.IsAuto && processNumber == 1)
                {
                    var measurements = executeResults.
                                       SelectMany(r => r.Data).
                                       Select(line => Measurement.Parse(logger, line, 0)).
                                       Where(r => r != null).
                                       ToArray();
                    var idleApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.IdleTarget).Select(m => m.Nanoseconds)).Median;
                    var mainApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.MainTarget).Select(m => m.Nanoseconds)).Median;
                    var percent    = idleApprox / mainApprox * 100;
                    launchCount = (int)Math.Round(Math.Max(2, 2 + (percent - 1) / 3)); // an empirical formula
                }
            }
            logger.NewLine();

            // Do a "Diagnostic" run, but DISCARD the results, so that the overhead of Diagnostics doesn't skew the overall results
            if (config.GetDiagnosers().Count() > 0)
            {
                logger.WriteLineInfo($"// Run, Diagnostic");
                config.GetCompositeDiagnoser().Start(benchmark);
                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, config.GetCompositeDiagnoser());
                var allRuns       = executeResult.Data.Select(line => Measurement.Parse(logger, line, 0)).Where(r => r != null).ToList();
                var report        = new BenchmarkReport(benchmark, null, null, new[] { executeResult }, allRuns);
                config.GetCompositeDiagnoser().Stop(benchmark, report);

                if (!executeResult.FoundExecutable)
                {
                    logger.WriteLineError("Executable not found");
                }
                logger.NewLine();
            }

            return(executeResults);
        }
        // This method gives us a chance to make a "best-effort" to clean anything up after Ctrl-C is typed in the Console
        private void HandlerCallback(object sender, ConsoleCancelEventArgs e)
        {
            Console.ResetColor();

            if (e.SpecialKey != ConsoleSpecialKey.ControlC && e.SpecialKey != ConsoleSpecialKey.ControlBreak)
            {
                return;
            }

            try
            {
                // Take a copy, in case SetProcess(..) is called whilst we are executing!
                var localProcess = process;

                if (HasProcessDied(localProcess))
                {
                    return;
                }

                logger?.WriteLineError($"Process {localProcess.ProcessName}.exe (Id:{localProcess.Id}) is still running, will now be killed");
                localProcess.Kill();

                if (HasProcessDied(localProcess))
                {
                    return;
                }

                // Give it a bit of time to exit!
                Thread.Sleep(500);

                if (HasProcessDied(localProcess))
                {
                    return;
                }

                var matchingProcess = Process.GetProcesses().FirstOrDefault(p => p.Id == localProcess.Id);
                if (matchingProcess == null || HasProcessDied(matchingProcess) || HasProcessDied(localProcess))
                {
                    return;
                }
                logger?.WriteLineError($"Process {matchingProcess.ProcessName}.exe (Id:{matchingProcess.Id}) has not exited after being killed!");
            }
            catch (InvalidOperationException invalidOpEx)
            {
                logger?.WriteLineError(invalidOpEx.Message);
            }
            catch (Exception ex)
            {
                logger?.WriteLineError(ex.ToString());
            }
        }
Example #18
        /// <summary>
        /// Parses the benchmark statistics from the plain text line.
        ///
        /// E.g. given the input <paramref name="line"/>:
        ///
        ///     Target 1: 10 op, 1005842518 ns
        ///
        /// Will extract the number of <see cref="Operations"/> performed and the
        /// total number of <see cref="Nanoseconds"/> it took to perform them.
        /// </summary>
        /// <param name="logger">The logger to write any diagnostic messages to.</param>
        /// <param name="line">The line to parse.</param>
        /// <param name="processIndex"></param>
        /// <param name="encoding">encoding to display value</param>
        /// <returns>An instance of <see cref="Measurement"/> if parsed successfully. <c>Null</c> in case of any trouble.</returns>
        public static Measurement Parse(ILogger logger, string line, int processIndex, Encoding encoding = null)
        {
            if (encoding == null)
            {
                encoding = Encoding.ASCII;
            }

            if (line != null && line.StartsWith(GcStats.ResultsLinePrefix))
            {
                return(Error(encoding));
            }

            try
            {
                var lineSplit = line.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries);

                var iterationInfo      = lineSplit[0];
                var iterationInfoSplit = iterationInfo.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
                var iterationMode      = ParseIterationMode(iterationInfoSplit[0]);
                int.TryParse(iterationInfoSplit[1], out int iterationIndex);

                var measurementsInfo      = lineSplit[1];
                var measurementsInfoSplit = measurementsInfo.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
                var op = 1L;
                var ns = double.PositiveInfinity;
                foreach (var item in measurementsInfoSplit)
                {
                    var measurementSplit = item.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
                    var value            = measurementSplit[0];
                    var unit             = measurementSplit[1];
                    switch (unit)
                    {
                    case "ns":
                        ns = double.Parse(value, HostEnvironmentInfo.MainCultureInfo);
                        break;

                    case "op":
                        op = long.Parse(value);
                        break;
                    }
                }
                return(new Measurement(processIndex, iterationMode, iterationIndex, op, ns, encoding));
            }
            catch (Exception)
            {
                logger.WriteLineError("Parse error in the following line:");
                logger.WriteLineError(line);
                return(Error(encoding));
            }
        }
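
For reference, a minimal usage sketch of the parser above, fed the sample line from its XML doc comment; ConsoleLogger.Default is an assumed logger and not part of the original example:

        // Usage sketch (assumption): parse one result line produced by a benchmark process.
        var measurement = Measurement.Parse(ConsoleLogger.Default, "Target 1: 10 op, 1005842518 ns", processIndex: 0);
        // On success, measurement.Operations == 10 and measurement.Nanoseconds == 1005842518.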
        public override bool IsSupported(Benchmark benchmark, ILogger logger)
        {
            if (benchmark.Job.Platform == Platform.X86)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only X64 compilation, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }
            if (benchmark.Job.Jit == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }

            return true;
        }
        public override bool IsSupported(Benchmark benchmark, ILogger logger)
        {
            if (benchmark.Job.Platform == Platform.X86)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only X64 compilation, benchmark {benchmark.ShortInfo} will not be executed");
                return(false);
            }
            if (benchmark.Job.Jit == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark {benchmark.ShortInfo} will not be executed");
                return(false);
            }

            return(true);
        }
Example #21
        private void PrintTable(SummaryTable table, ILogger logger)
        {
            if (table.FullContent.Length == 0)
            {
                logger.WriteLineError("There are no benchmarks found ");
                logger.NewLine();
                return;
            }
            table.PrintCommonColumns(logger);
            logger.NewLine();

            if (useCodeBlocks)
            {
                logger.Write("```");
                logger.NewLine();
            }

            table.PrintLine(table.FullHeader, logger, "", " |");
            logger.NewLine();
            logger.WriteLineStatistic(string.Join("", table.Columns.Where(c => c.NeedToShow).Select(c => new string('-', c.Width) + " |")));
            foreach (var line in table.FullContent)
            {
                table.PrintLine(line, logger, "", " |");
                logger.NewLine();
            }
        }
        private void PrintTable(SummaryTable table, ILogger logger)
        {
            if (table.FullContent.Length == 0)
            {
                logger.WriteLineError("There are no benchmarks found ");
                logger.NewLine();
                return;
            }
            table.PrintCommonColumns(logger);
            logger.NewLine();

            if (useCodeBlocks)
            {
                logger.Write("```");
                logger.NewLine();
            }

            table.PrintLine(table.FullHeader, logger, "", " |");
            logger.NewLine();
            logger.WriteLineStatistic(string.Join("", table.Columns.Where(c => c.NeedToShow).Select(c => new string('-', c.Width) + " |")));
            foreach (var line in table.FullContent)
            {
                table.PrintLine(line, logger, "", " |");
                logger.NewLine();
            }
        }
Example #23
        protected virtual FileInfo GetProjectFilePath(Type benchmarkTarget, ILogger logger)
        {
            if (!GetSolutionRootDirectory(out var rootDirectory) && !GetProjectRootDirectory(out rootDirectory))
            {
                logger.WriteLineError(
                    $"Unable to find .sln or .csproj file. Will use current directory {Directory.GetCurrentDirectory()} to search for project file. If you don't use .sln file on purpose it should not be a problem.");
                rootDirectory = new DirectoryInfo(Directory.GetCurrentDirectory());
            }

            // important assumption! project's file name === output dll name
            string projectName = benchmarkTarget.GetTypeInfo().Assembly.GetName().Name;

            // I was afraid of using .GetFiles with some smart search pattern due to the fact that the method was designed for Windows
            // and now .NET is cross platform so who knows if the pattern would be supported for other OSes
            var possibleNames = new HashSet <string> {
                $"{projectName}.csproj", $"{projectName}.fsproj", $"{projectName}.vbproj"
            };
            var projectFile = rootDirectory
                              .EnumerateFiles("*.*", SearchOption.AllDirectories)
                              .FirstOrDefault(file => possibleNames.Contains(file.Name));

            if (projectFile == default(FileInfo))
            {
                throw new NotSupportedException(
                          $"Unable to find {projectName} in {rootDirectory.FullName} and its subfolders. Most probably the name of output exe is different than the name of the .(c/f)sproj");
            }
            return(projectFile);
        }
Example #24
        internal static (bool allTypesValid, IReadOnlyList <Type> runnable) GetTypesWithRunnableBenchmarks(IEnumerable <Type> types, IEnumerable <Assembly> assemblies, ILogger logger)
        {
            var validRunnableTypes = new List <Type>();

            foreach (var type in types)
            {
                if (type.ContainsRunnableBenchmarks())
                {
                    validRunnableTypes.AddRange(GenericBenchmarksBuilder.BuildGenericsIfNeeded(type).Where(tuple => tuple.isSuccess).Select(tuple => tuple.result));
                }
                else
                {
                    logger.WriteLineError($"Type {type} is invalid. Only public, non-generic (closed generic types with public parameterless ctors are supported), non-abstract, non-sealed, non-static types with public instance [Benchmark] method(s) are supported.");

                    return(false, Array.Empty <Type>());
                }
            }

            foreach (var assembly in assemblies)
            {
                validRunnableTypes.AddRange(GenericBenchmarksBuilder.GetRunnableBenchmarks(assembly.GetRunnableBenchmarks()));
            }

            return(true, validRunnableTypes);
        }
Example #25
        /// <summary>Changes the cpu affinity of the process.</summary>
        /// <param name="process">The target process.</param>
        /// <param name="processorAffinity">The processor affinity.</param>
        /// <param name="logger">The logger.</param>
        public static void SetAffinity(
            [NotNull] this Process process,
            IntPtr processorAffinity,
            [NotNull] ILogger logger)
        {
            if (process == null)
            {
                throw new ArgumentNullException(nameof(process));
            }

            if (logger == null)
            {
                throw new ArgumentNullException(nameof(logger));
            }

            try
            {
                process.ProcessorAffinity = processorAffinity;
            }
            catch (Exception ex)
            {
                logger.WriteLineError(
                    string.Format(
                        "// ! Failed to set up processor affinity 0x{1:X}. Make sure you have the right permissions. Message: {0}",
                        ex.Message,
                        (long)processorAffinity));
            }
        }
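
A short usage sketch for the SetAffinity extension above; the 0x3 mask (cores 0 and 1) and ConsoleLogger.Default are illustrative assumptions:

        // Usage sketch (assumption): pin the current process to the first two cores.
        var currentProcess = Process.GetCurrentProcess();
        currentProcess.SetAffinity((IntPtr)0x3, ConsoleLogger.Default); // failures are logged by the helper, not thrown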
Example #26
        private ExecuteResult Execute(Process process, BenchmarkCase benchmarkCase, SynchronousProcessOutputLoggerWithDiagnoser loggerWithDiagnoser,
                                      ILogger logger, ConsoleExitHandler consoleExitHandler, int launchIndex)
        {
            logger.WriteLineInfo($"// Execute: {process.StartInfo.FileName} {process.StartInfo.Arguments} in {process.StartInfo.WorkingDirectory}");

            process.Start();

            process.EnsureHighPriority(logger);
            if (benchmarkCase.Job.Environment.HasValue(EnvironmentMode.AffinityCharacteristic))
            {
                process.TrySetAffinity(benchmarkCase.Job.Environment.Affinity, logger);
            }

            loggerWithDiagnoser.ProcessInput();

            if (!process.WaitForExit(milliseconds: (int)ExecuteParameters.ProcessExitTimeout.TotalMilliseconds))
            {
                logger.WriteLineInfo("// The benchmarking process did not quit on time, it's going to get force killed now.");

                consoleExitHandler.KillProcessTree();
            }

            if (loggerWithDiagnoser.LinesWithResults.Any(line => line.Contains("BadImageFormatException")))
            {
                logger.WriteLineError("You are probably missing <PlatformTarget>AnyCPU</PlatformTarget> in your .csproj file.");
            }

            return(new ExecuteResult(true, process.ExitCode, process.Id, loggerWithDiagnoser.LinesWithResults, loggerWithDiagnoser.LinesWithExtraOutput, launchIndex));
        }
Example #27
        private static void PrintTable(SummaryTable table, ILogger logger)
        {
            if (table.FullContent.Length == 0)
            {
                logger.WriteLineError("<pre>There are no benchmarks found</pre>");
                return;
            }

            logger.Write("<pre><code>");
            table.PrintCommonColumns(logger);
            logger.WriteLine("</code></pre>");
            logger.WriteLine();

            logger.WriteLine("<table>");

            logger.Write("<thead>");
            logger.Write("<tr>");
            table.PrintLine(table.FullHeader, logger, "<th>", "</th>");
            logger.WriteLine("</tr>");
            logger.Write("</thead>");

            logger.Write("<tbody>");
            foreach (var line in table.FullContent)
            {
                logger.Write("<tr>");
                PrintLine(table, line, logger, "<td>", "</td>");
                logger.Write("</tr>");
            }
            logger.Write("</tbody>");

            logger.WriteLine("</table>");
        }
Example #28
        public static bool TrySetPriority(
            [NotNull] this Process process,
            ProcessPriorityClass priority,
            [NotNull] ILogger logger)
        {
            if (process == null)
            {
                throw new ArgumentNullException(nameof(process));
            }
            if (logger == null)
            {
                throw new ArgumentNullException(nameof(logger));
            }

            try
            {
                process.PriorityClass = priority;
                return(true);
            }
            catch (Exception ex)
            {
                logger.WriteLineError(
                    $"// ! Failed to set up priority {priority} for process {process}. Make sure you have the right permissions. Message: {ex.Message}");
            }

            return(false);
        }
        public IEnumerable <string> ExportToFiles(Summary summary, ILogger consoleLogger)
        {
            string fileName = GetFileName(summary);
            string filePath = GetAtrifactFullName(summary);

            if (File.Exists(filePath))
            {
                try
                {
                    File.Delete(filePath);
                }
                catch (IOException)
                {
                    var uniqueString        = System.DateTime.Now.ToString("yyyyMMdd-HHmmss");
                    var alternativeFilePath = $"{Path.Combine(summary.ResultsDirectoryPath, fileName)}-{FileCaption}{FileNameSuffix}-{uniqueString}.{FileExtension}";
                    consoleLogger.WriteLineError($"Could not overwrite file {filePath}. Exporting to {alternativeFilePath}");
                    filePath = alternativeFilePath;
                }
            }

            using (var stream = Portability.StreamWriter.FromPath(filePath))
            {
                ExportToLog(summary, new StreamLogger(stream));
            }

            return(new[] { filePath });
        }
Example #30
        private void PrintTable(SummaryTable table, ILogger logger)
        {
            if (table.FullContent.Length == 0)
            {
                logger.WriteLineError("There are no benchmarks found ");
                logger.WriteLine();
                return;
            }

            table.PrintCommonColumns(logger);
            logger.WriteLine();

            if (useCodeBlocks)
            {
                logger.Write("```");
                logger.WriteLine();
            }

            table.PrintLine(table.FullHeader, logger, string.Empty, " |");
            logger.WriteLineStatistic(string.Join("", table.Columns.Where(c => c.NeedToShow).Select(c => new string('-', c.Width) + " |")));
            var rowCounter   = 0;
            var highlightRow = false;

            foreach (var line in table.FullContent)
            {
                // Each time we hit the start of a new group, alternate the colour (in the console) or display bold in Markdown
                if (table.FullContentStartOfGroup[rowCounter])
                {
                    highlightRow = !highlightRow;
                }

                table.PrintLine(line, logger, string.Empty, " |", highlightRow, table.FullContentStartOfGroup[rowCounter], startOfGroupInBold);
                rowCounter++;
            }
        }
Example #31
        public static bool TrySetAffinity(
            [NotNull] this Process process,
            IntPtr processorAffinity,
            [NotNull] ILogger logger)
        {
            if (process == null)
            {
                throw new ArgumentNullException(nameof(process));
            }
            if (logger == null)
            {
                throw new ArgumentNullException(nameof(logger));
            }

            try
            {
                process.ProcessorAffinity = FixAffinity(processorAffinity);
                return(true);
            }
            catch (Exception ex)
            {
                logger.WriteLineError(
                    $"// ! Failed to set up processor affinity 0x{(long)processorAffinity:X} for process {process}. Make sure you have the right permissions. Message: {ex.Message}");
            }

            return(false);
        }
Example #32
        private ExecuteResult Execute(Process process, Benchmark benchmark, SynchronousProcessOutputLoggerWithDiagnoser loggerWithDiagnoser, ILogger logger)
        {
            logger.WriteLineInfo("// Execute: " + process.StartInfo.FileName + " " + process.StartInfo.Arguments);

            ConsoleHandler.Instance.SetProcess(process);

            process.Start();

            process.EnsureHighPriority(logger);
            if (benchmark.Job.Env.HasValue(EnvMode.AffinityCharacteristic))
            {
                process.EnsureProcessorAffinity(benchmark.Job.Env.Affinity);
            }

            loggerWithDiagnoser.ProcessInput();

            process.WaitForExit(); // should we add timeout here?

            if (process.ExitCode == 0)
            {
                return(new ExecuteResult(true, process.ExitCode, loggerWithDiagnoser.LinesWithResults, loggerWithDiagnoser.LinesWithExtraOutput));
            }

            if (loggerWithDiagnoser.LinesWithResults.Any(line => line.Contains("BadImageFormatException")))
            {
                logger.WriteLineError("You are probably missing <PlatformTarget>AnyCPU</PlatformTarget> in your .csproj file.");
            }

            return(new ExecuteResult(true, process.ExitCode, Array.Empty <string>(), Array.Empty <string>()));
        }
        internal static bool InvalidCliPath(string customDotNetCliPath, BenchmarkCase benchmarkCase, ILogger logger)
        {
            if (string.IsNullOrEmpty(customDotNetCliPath) && !HostEnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli to be installed or path to local dotnet cli provided in explicit way using `--cli` argument, benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                return(true);
            }

            if (!string.IsNullOrEmpty(customDotNetCliPath) && !File.Exists(customDotNetCliPath))
            {
                logger.WriteLineError($"Provided custom dotnet cli path does not exist, benchmark '{benchmarkCase.DisplayInfo}' will not be executed");
                return(true);
            }

            return(false);
        }
        private void PrintTable(SummaryTable table, ILogger logger)
        {
            if (table.FullContent.Length == 0)
            {
                logger.WriteLineError("<pre>There are no benchmarks found</pre>");
                return;
            }

            logger.Write("<pre><code>");
            table.PrintCommonColumns(logger);
            logger.WriteLine("</code></pre>");
            logger.WriteLine();

            logger.WriteLine("<table>");

            logger.Write("<tr>");
            table.PrintLine(table.FullHeader, logger, "<th>", "</th>");
            logger.Write("</tr>");

            foreach (var line in table.FullContent)
            {
                logger.Write("<tr>");
                table.PrintLine(line, logger, "<td>", "</td>");
                logger.Write("</tr>");
            }

            logger.WriteLine("</table>");
        }
Example #35
        public override bool IsSupported(Benchmark benchmark, ILogger logger)
        {
            if (!EnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli toolchain to be installed, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }

            return true;
        }
        public virtual bool IsSupported(Benchmark benchmark, ILogger logger)
        {
            var runtime = benchmark.Job.Runtime == Runtime.Host ? RuntimeInformation.GetCurrent() : benchmark.Job.Runtime;
            if (runtime != Runtime.Mono && benchmark.Job.Jit == Jit.Llvm)
            {
                logger.WriteLineError($"Llvm is supported only for Mono, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }

            return true;
        }
        public static void EnsureHighPriority(this Process process, ILogger logger)
        {
            try
            {
                process.PriorityClass = ProcessPriorityClass.High;
            }
            catch (Exception ex)
            {
                logger.WriteLineError($"Failed to set up high priority. Make sure you have the right permissions. Message: {ex.Message}");
            }
        }
        public override bool IsSupported(Benchmark benchmark, ILogger logger)
        {
            if (!EnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli toolchain to be installed, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }

            if (benchmark.Job.Platform == Platform.X86)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only X64 compilation, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }
            if (benchmark.Job.Jit == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }

            return true;
        }
        public virtual bool IsSupported(Benchmark benchmark, ILogger logger, IResolver resolver)
        {
            var runtime = benchmark.Job.ResolveValue(EnvMode.RuntimeCharacteristic, resolver);
            var jit = benchmark.Job.ResolveValue(EnvMode.JitCharacteristic, resolver);
            if (runtime != Runtime.Mono && jit == Jit.Llvm)
            {
                logger.WriteLineError($"Llvm is supported only for Mono, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return false;
            }

            return true;
        }
        public override bool IsSupported(Benchmark benchmark, ILogger logger, IResolver resolver)
        {
            if(!base.IsSupported(benchmark, logger, resolver))
            {
                return false;
            }

            if (!HostEnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli toolchain to be installed, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return false;
            }

            if (benchmark.Job.ResolveValue(EnvMode.PlatformCharacteristic, resolver) == Platform.X86)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only X64 compilation, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return false;
            }
            if (benchmark.Job.ResolveValue(EnvMode.JitCharacteristic, resolver) == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark '{benchmark.DisplayInfo}' will not be executed");
                return false;
            }
            if (benchmark.Job.ResolveValue(GcMode.CpuGroupsCharacteristic, resolver))
            {
                logger.WriteLineError($"Currently project.json does not support CpuGroups (app.config does), benchmark '{benchmark.DisplayInfo}' will not be executed");
                return false;
            }
            if (benchmark.Job.ResolveValue(GcMode.AllowVeryLargeObjectsCharacteristic, resolver))
            {
                logger.WriteLineError($"Currently project.json does not support gcAllowVeryLargeObjects (app.config does), benchmark '{benchmark.DisplayInfo}' will not be executed");
                return false;
            }

            return true;
        }
        public override bool IsSupported(Benchmark benchmark, ILogger logger)
        {
            if(!base.IsSupported(benchmark, logger))
            {
                return false;
            }

            if (!HostEnvironmentInfo.GetCurrent().IsDotNetCliInstalled())
            {
                logger.WriteLineError($"BenchmarkDotNet requires dotnet cli toolchain to be installed, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }

            if (benchmark.Job.Platform == Platform.X86)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only X64 compilation, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }
            if (benchmark.Job.Jit == Jit.LegacyJit)
            {
                logger.WriteLineError($"Currently dotnet cli toolchain supports only RyuJit, benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }
            if (benchmark.Job.GarbageCollection.CpuGroups)
            {
                logger.WriteLineError($"Currently project.json does not support CpuGroups (app.config does), benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }
            if (benchmark.Job.GarbageCollection.AllowVeryLargeObjects)
            {
                logger.WriteLineError($"Currently project.json does not support gcAllowVeryLargeObjects (app.config does), benchmark {benchmark.ShortInfo} will not be executed");
                return false;
            }

            return true;
        }
        private static void PrintTable(SummaryTable table, ILogger logger)
        {
            if (table.FullContent.Length == 0)
            {
                logger.WriteLine("[WARNING]");
                logger.WriteLine("====");
                logger.WriteLineError("There are no benchmarks found ");
                logger.WriteLine("====");
                logger.WriteLine();
                return;
            }

            table.PrintCommonColumns(logger);
            logger.WriteLine("....");

            logger.WriteLine("[options=\"header\"]");
            logger.WriteLine("|===");
            table.PrintLine(table.FullHeader, logger, "|", string.Empty);
            foreach (var line in table.FullContent)
                table.PrintLine(line, logger, "|", string.Empty);
            logger.WriteLine("|===");
        }
        public override void ExportToLog(Summary summary, ILogger logger)
        {
            if (useCodeBlocks)
                logger.WriteLine($"```{codeBlocksSyntax}");
            logger = GetRightLogger(logger);
            logger.WriteLine();
            foreach (var infoLine in HostEnvironmentInfo.GetCurrent().ToFormattedString())
            {
                logger.WriteLineInfo(infoLine);
            }
            logger.WriteLine();

            PrintTable(summary.Table, logger);

            // TODO: move this logic to an analyser
            var benchmarksWithTroubles = summary.Reports.Where(r => !r.GetResultRuns().Any()).Select(r => r.Benchmark).ToList();
            if (benchmarksWithTroubles.Count > 0)
            {
                logger.WriteLine();
                logger.WriteLineError("Benchmarks with issues:");
                foreach (var benchmarkWithTroubles in benchmarksWithTroubles)
                    logger.WriteLineError("  " + benchmarkWithTroubles.ShortInfo);
            }
        }
        private static ValidationError[] Validate(IList<Benchmark> benchmarks, ILogger logger, IConfig config)
        {
            logger.WriteLineInfo("// Validating benchmarks:");
            var validationErrors = config.GetCompositeValidator().Validate(benchmarks).ToArray();
            foreach (var validationError in validationErrors)
            {
                logger.WriteLineError(validationError.Message);
            }
            return validationErrors;
        }
        private void PrintTable(SummaryTable table, ILogger logger)
        {
            if (table.FullContent.Length == 0)
            {
                logger.WriteLineError("There are no benchmarks found ");
                logger.WriteLine();
                return;
            }

            table.PrintCommonColumns(logger);
            logger.WriteLine();

            if (useCodeBlocks)
            {
                logger.Write("```");
                logger.WriteLine();
            }

            table.PrintLine(table.FullHeader, logger, string.Empty, " |");
            logger.WriteLineStatistic(string.Join("", table.Columns.Where(c => c.NeedToShow).Select(c => new string('-', c.Width) + " |")));
            var rowCounter = 0;
            var highlightRow = false;
            foreach (var line in table.FullContent)
            {
                // Each time we hit the start of a new group, alternate the colour (in the console) or display bold in Markdown
                if (table.FullContentStartOfGroup[rowCounter])
                {
                    highlightRow = !highlightRow;
                }

                table.PrintLine(line, logger, string.Empty, " |", highlightRow, table.FullContentStartOfGroup[rowCounter], startOfGroupInBold);
                rowCounter++;
            }
        }
        /// <summary>
        /// Parses the benchmark statistics from the plain text line.
        /// 
        /// E.g. given the input <paramref name="line"/>:
        /// 
        ///     Target 1: 10 op, 1005842518 ns
        /// 
        /// Will extract the number of <see cref="Operations"/> performed and the 
        /// total number of <see cref="Nanoseconds"/> it took to perform them.
        /// </summary>
        /// <param name="logger">The logger to write any diagnostic messages to.</param>
        /// <param name="line">The line to parse.</param>
        /// <returns>An instance of <see cref="Measurement"/> if parsed successfully. <c>Null</c> in case of any trouble.</returns>
        public static Measurement Parse(ILogger logger, string line, int processIndex)
        {
            try
            {
                var lineSplit = line.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries);

                var iterationInfo = lineSplit[0];
                var iterationInfoSplit = iterationInfo.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
                var iterationMode = ParseIterationMode(iterationInfoSplit[0]);
                var iterationIndex = 0;
                int.TryParse(iterationInfoSplit[1], out iterationIndex);

                var measurementsInfo = lineSplit[1];
                var measurementsInfoSplit = measurementsInfo.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
                var op = 1L;
                var ns = double.PositiveInfinity;
                foreach (var item in measurementsInfoSplit)
                {
                    var measurementSplit = item.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
                    var value = measurementSplit[0];
                    var unit = measurementSplit[1];
                    switch (unit)
                    {
                        case "ns":
                            ns = double.Parse(value, EnvironmentHelper.MainCultureInfo);
                            break;
                        case "op":
                            op = long.Parse(value);
                            break;
                    }
                }
                return new Measurement(processIndex, iterationMode, iterationIndex, op, ns);
            }
            catch (Exception)
            {
                logger.WriteLineError("Parse error in the following line:");
                logger.WriteLineError(line);
                return null;
            }
        }
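        // Minimal usage sketch (not part of the original source, method name hypothetical):
        // parses the example line from the XML docs above. Assumes ConsoleLogger.Default
        // from BenchmarkDotNet.Loggers is available as the ILogger.
        private static void ParseExample()
        {
            var logger = ConsoleLogger.Default;
            var measurement = Measurement.Parse(logger, "Target 1: 10 op, 1005842518 ns", processIndex: 0);
            // Expected: 10 operations taking 1005842518 ns in total, i.e. roughly 100.6 ms per operation.
            logger.WriteLineInfo($"// {measurement.Operations} op, {measurement.Nanoseconds} ns");
        }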
 private void CopyFile(ILogger logger, string sourcePath, string destinationPath)
 {
     logger.WriteLineInfo("//   Copying {0}", Path.GetFileName(sourcePath));
     logger.WriteLineInfo("//   from: {0}", Path.GetDirectoryName(sourcePath));
     logger.WriteLineInfo("//   to: {0}", Path.GetDirectoryName(destinationPath));
     try
     {
         File.Copy(Path.GetFullPath(sourcePath), Path.GetFullPath(destinationPath), overwrite: true);
     }
     catch (Exception ex)
     {
         logger.WriteLineError(ex.Message);
         throw;
     }
 }
        private static List<ExecuteResult> Execute(ILogger logger, Benchmark benchmark, IToolchain toolchain, BuildResult buildResult, IConfig config, IResolver resolver)
        {
            var executeResults = new List<ExecuteResult>();

            logger.WriteLineInfo("// *** Execute ***");
            bool analyzeRunToRunVariance = benchmark.Job.ResolveValue(AccuracyMode.AnalyzeLaunchVarianceCharacteristic, resolver);
            bool autoLaunchCount = !benchmark.Job.HasValue(RunMode.LaunchCountCharacteristic);
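            // Default launch count: 2 when launch variance is analyzed (presumably so two launches can be compared), otherwise 1.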
            int defaultValue = analyzeRunToRunVariance ? 2 : 1;
            int launchCount = Math.Max(
                1,
                autoLaunchCount ? defaultValue : benchmark.Job.Run.LaunchCount);

            for (int launchIndex = 0; launchIndex < launchCount; launchIndex++)
            {
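                // While the launch count is still being auto-tuned (the first two launches with variance
                // analysis enabled), the "/ total" suffix is omitted because the final count is not yet known.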
                string printedLaunchCount = (analyzeRunToRunVariance &&
                    autoLaunchCount &&
                    launchIndex < 2)
                    ? ""
                    : " / " + launchCount;
                logger.WriteLineInfo($"// Launch: {launchIndex + 1}{printedLaunchCount}");

                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, resolver);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                if (executeResult.ExitCode != 0)
                    logger.WriteLineError("ExitCode != 0");
                executeResults.Add(executeResult);

                var measurements = executeResults
                        .SelectMany(r => r.Data)
                        .Select(line => Measurement.Parse(logger, line, 0))
                        .Where(r => r.IterationMode != IterationMode.Unknown)
                        .ToArray();

                if (!measurements.Any())
                {
                    // Something went wrong during the benchmark, don't bother doing more runs
                    logger.WriteLineError($"No more Benchmark runs will be launched as NO measurements were obtained from the previous run!");
                    break;
                }

                if (autoLaunchCount && launchIndex == 1 && analyzeRunToRunVariance)
                {
                    // TODO: improve this logic
                    var idleApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.IdleTarget).Select(m => m.Nanoseconds)).Median;
                    var mainApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.MainTarget).Select(m => m.Nanoseconds)).Median;
                    var percent = idleApprox / mainApprox * 100;
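                    // The empirical formula below scales the launch count with the idle-to-main ratio,
                    // e.g. percent = 5 => round(2 + 4/3) = 3 launches; percent = 40 => 15 launches.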
                    launchCount = (int)Math.Round(Math.Max(2, 2 + (percent - 1) / 3)); // an empirical formula
                }
            }
            logger.WriteLine();

            // Do a "Diagnostic" run, but DISCARD the results, so that the overhead of Diagnostics doesn't skew the overall results
            if (config.GetDiagnosers().Any())
            {
                logger.WriteLineInfo("// Run, Diagnostic");
                var compositeDiagnoser = config.GetCompositeDiagnoser();

                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, resolver, compositeDiagnoser);

                var allRuns = executeResult.Data.Select(line => Measurement.Parse(logger, line, 0)).Where(r => r.IterationMode != IterationMode.Unknown).ToList();
                var report = new BenchmarkReport(benchmark, null, null, new[] { executeResult }, allRuns);
                compositeDiagnoser.ProcessResults(benchmark, report);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                logger.WriteLine();
            }

            return executeResults;
        }
 private static GenerateResult Generate(ILogger logger, IToolchain toolchain, Benchmark benchmark, string rootArtifactsFolderPath, IConfig config)
 {
     logger.WriteLineInfo("// *** Generate *** ");
     var generateResult = toolchain.Generator.GenerateProject(benchmark, logger, rootArtifactsFolderPath, config);
     if (generateResult.IsGenerateSuccess)
     {
         logger.WriteLineInfo("// Result = Success");
         logger.WriteLineInfo($"// {nameof(generateResult.ArtifactsPaths.BinariesDirectoryPath)} = {generateResult.ArtifactsPaths?.BinariesDirectoryPath}");
     }
     else
     {
         logger.WriteLineError("// Result = Failure");
         if (generateResult.GenerateException != null)
             logger.WriteLineError($"// Exception: {generateResult.GenerateException.Message}");
     }
     logger.WriteLine();
     return generateResult;
 }
        private static Summary Run(Benchmark[] benchmarks, ILogger logger, string title, IConfig config, string rootArtifactsFolderPath, Func<IJob, IToolchain> toolchainProvider)
        {
            logger.WriteLineHeader("// ***** BenchmarkRunner: Start   *****");
            logger.WriteLineInfo("// Found benchmarks:");
            foreach (var benchmark in benchmarks)
                logger.WriteLineInfo($"//   {benchmark.ShortInfo}");
            logger.WriteLine();

            var validationErrors = Validate(benchmarks, logger, config);
            if (validationErrors.Any(validationError => validationError.IsCritical))
            {
                return Summary.CreateFailed(benchmarks, title, HostEnvironmentInfo.GetCurrent(), config, GetResultsFolderPath(rootArtifactsFolderPath), validationErrors);
            }

            var globalChronometer = Chronometer.Start();
            var reports = new List<BenchmarkReport>();
            foreach (var benchmark in benchmarks)
            {
                var report = Run(benchmark, logger, config, rootArtifactsFolderPath, toolchainProvider);
                reports.Add(report);
                if (report.GetResultRuns().Any())
                    logger.WriteLineStatistic(report.GetResultRuns().GetStatistics().ToTimeStr());

                logger.WriteLine();
            }
            var clockSpan = globalChronometer.Stop();

            var summary = new Summary(title, reports, HostEnvironmentInfo.GetCurrent(), config, GetResultsFolderPath(rootArtifactsFolderPath), clockSpan.GetTimeSpan(), validationErrors);

            logger.WriteLineHeader("// ***** BenchmarkRunner: Finish  *****");
            logger.WriteLine();

            logger.WriteLineHeader("// * Export *");
            var currentDirectory = Directory.GetCurrentDirectory();
            foreach (var file in config.GetCompositeExporter().ExportToFiles(summary))
            {
                logger.WriteLineInfo($"  {file.Replace(currentDirectory, string.Empty).Trim('/', '\\')}");
            }
            logger.WriteLine();

            logger.WriteLineHeader("// * Detailed results *");

            // TODO: make exporter
            foreach (var report in reports)
            {
                logger.WriteLineInfo(report.Benchmark.ShortInfo);
                logger.WriteLineStatistic(report.GetResultRuns().GetStatistics().ToTimeStr());
                logger.WriteLine();
            }

            LogTotalTime(logger, clockSpan.GetTimeSpan());
            logger.WriteLine();

            logger.WriteLineHeader("// * Summary *");
            MarkdownExporter.Console.ExportToLog(summary, logger);

            // TODO: make exporter
            var warnings = config.GetCompositeAnalyser().Analyse(summary).ToList();
            if (warnings.Count > 0)
            {
                logger.WriteLine();
                logger.WriteLineError("// * Warnings * ");
                foreach (var warning in warnings)
                    logger.WriteLineError($"{warning.Message}");
            }

            if (config.GetDiagnosers().Any())
            {
                logger.WriteLine();
                config.GetCompositeDiagnoser().DisplayResults(logger);
            }

            logger.WriteLine();
            logger.WriteLineHeader("// ***** BenchmarkRunner: End *****");
            return summary;
        }
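        // Minimal usage sketch (not part of the original source): the private Run pipeline above is
        // normally reached through the public BenchmarkRunner entry point; the benchmark class,
        // method and ProgramSketch names below are hypothetical.
        using BenchmarkDotNet.Attributes;
        using BenchmarkDotNet.Running;

        public class SleepBenchmark
        {
            [Benchmark]
            public void Sleep10() => System.Threading.Thread.Sleep(10);
        }

        public static class ProgramSketch
        {
            public static void Main() => BenchmarkRunner.Run<SleepBenchmark>();
        }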
        private static List<ExecuteResult> Execute(ILogger logger, Benchmark benchmark, IToolchain toolchain, BuildResult buildResult, IConfig config)
        {
            var executeResults = new List<ExecuteResult>();

            logger.WriteLineInfo("// *** Execute ***");
            var launchCount = Math.Max(1, benchmark.Job.LaunchCount.IsAuto ? 2 : benchmark.Job.LaunchCount.Value);

            for (int processNumber = 0; processNumber < launchCount; processNumber++)
            {
                var printedProcessNumber = (benchmark.Job.LaunchCount.IsAuto && processNumber < 2) ? "" : " / " + launchCount.ToString();
                logger.WriteLineInfo($"// Launch: {processNumber + 1}{printedProcessNumber}");

                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                executeResults.Add(executeResult);

                var measurements = executeResults
                        .SelectMany(r => r.Data)
                        .Select(line => Measurement.Parse(logger, line, 0))
                        .Where(r => r.IterationMode != IterationMode.Unknown)
                        .ToArray();

                if (!measurements.Any())
                {
                    // Something went wrong during the benchmark, don't bother doing more runs
                    logger.WriteLineError($"No more Benchmark runs will be launched as NO measurements were obtained from the previous run!");
                    break;
                }

                if (benchmark.Job.LaunchCount.IsAuto && processNumber == 1)
                {
                    var idleApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.IdleTarget).Select(m => m.Nanoseconds)).Median;
                    var mainApprox = new Statistics(measurements.Where(m => m.IterationMode == IterationMode.MainTarget).Select(m => m.Nanoseconds)).Median;
                    var percent = idleApprox / mainApprox * 100;
                    launchCount = (int)Math.Round(Math.Max(2, 2 + (percent - 1) / 3)); // an empirical formula
                }
            }
            logger.WriteLine();

            // Do a "Diagnostic" run, but DISCARD the results, so that the overhead of Diagnostics doesn't skew the overall results
            if (config.GetDiagnosers().Any())
            {
                logger.WriteLineInfo($"// Run, Diagnostic");
                config.GetCompositeDiagnoser().Start(benchmark);
                var executeResult = toolchain.Executor.Execute(buildResult, benchmark, logger, config.GetCompositeDiagnoser());
                var allRuns = executeResult.Data.Select(line => Measurement.Parse(logger, line, 0)).Where(r => r.IterationMode != IterationMode.Unknown).ToList();
                var report = new BenchmarkReport(benchmark, null, null, new[] { executeResult }, allRuns);
                config.GetCompositeDiagnoser().Stop(benchmark, report);

                if (!executeResult.FoundExecutable)
                    logger.WriteLineError("Executable not found");
                logger.WriteLine();
            }

            return executeResults;
        }
 private static BuildResult Build(ILogger logger, IToolchain toolchain, GenerateResult generateResult, Benchmark benchmark)
 {
     logger.WriteLineInfo("// *** Build ***");
     var buildResult = toolchain.Builder.Build(generateResult, logger, benchmark);
     if (buildResult.IsBuildSuccess)
     {
         logger.WriteLineInfo("// Result = Success");
     }
     else
     {
         logger.WriteLineError("// Result = Failure");
         if (buildResult.BuildException != null)
             logger.WriteLineError($"// Exception: {buildResult.BuildException.Message}");
     }
     logger.WriteLine();
     return buildResult;
 }