Code Example #1
File: Program.cs  Project: DrewScoggins/performance-2
        static IEnumerable<BenchmarkConfiguration> GetBenchmarkConfigurations(CommandLineOptions options)
        {
            string tieredEnv  = Environment.GetEnvironmentVariable("COMPLUS_TieredCompilation");
            string minoptsEnv = Environment.GetEnvironmentVariable("COMPLUS_JitMinopts");
            string r2rEnv     = Environment.GetEnvironmentVariable("COMPLUS_ReadyToRun");
            string ngenEnv    = Environment.GetEnvironmentVariable("COMPLUS_ZapDisable");
            BenchmarkConfiguration envConfig = new BenchmarkConfiguration();

            if (tieredEnv != null && tieredEnv != "0")
            {
                envConfig.WithTiering();
            }
            if (minoptsEnv != null && minoptsEnv != "0")
            {
                envConfig.WithMinOpts();
            }
            if (r2rEnv != null && r2rEnv != "1")
            {
                envConfig.WithNoR2R();
            }
            if (ngenEnv != null && ngenEnv != "0")
            {
                envConfig.WithNoNgen();
            }

            string[] configNames = options.Configs.Distinct().ToArray();
            if (!envConfig.IsDefault && configNames.Length != 0)
            {
                throw new Exception("ERROR: Benchmarks cannot be configured via both environment variables and the --configs command line option at the same time. Use one or the other.");
            }
            if (configNames.Length == 0)
            {
                yield return envConfig;
                yield break;
            }

            BenchmarkConfiguration[] possibleConfigs = new BenchmarkConfiguration[]
            {
                new BenchmarkConfiguration(),
                new BenchmarkConfiguration().WithTiering(),
                new BenchmarkConfiguration().WithMinOpts(),
                new BenchmarkConfiguration().WithNoR2R(),
                new BenchmarkConfiguration().WithNoNgen()
            };
            foreach (string configName in configNames)
            {
                BenchmarkConfiguration config = possibleConfigs.FirstOrDefault(c => c.Name.Equals(configName, StringComparison.OrdinalIgnoreCase));
                if (config == null)
                {
                    throw new ArgumentException("Unrecognized config value: " + configName);
                }
                yield return config;
            }
        }
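
The method is an iterator, so configurations are produced lazily and the environment-variable/--configs conflict check only runs once enumeration begins. A hypothetical caller might look like the sketch below; the CommandLineOptions usage is an assumption, not taken from the project.

        // Hypothetical caller sketch; the real call site is not shown in these examples.
        static void RunAll(CommandLineOptions options)
        {
            foreach (BenchmarkConfiguration config in GetBenchmarkConfigurations(options))
            {
                // Each yielded config carries the settings (e.g. tiering, minopts)
                // applied to the benchmark process in MeasureIterations (Example #3).
                Console.WriteLine("Running with config: " + config.Name);
            }
        }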
Code Example #2
 public BenchmarkRunResult(Benchmark benchmark, BenchmarkConfiguration configuration)
 {
     Benchmark        = benchmark;
     Configuration    = configuration;
     IterationResults = new List<IterationResult>();
 }
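
BenchmarkRunResult is a plain accumulator: the harness in Example #3 creates one per (benchmark, configuration) pair and appends an IterationResult per iteration from its PostIterationDelegate. A minimal sketch of that flow, with the surrounding variables assumed from the calling context:

 // Sketch only: 'benchmark' and 'config' are assumed, not shown here.
 var result = new BenchmarkRunResult(benchmark, config);
 // Per iteration (see PostIterationDelegate in Example #3):
 // result.IterationResults.Add(RecordIterationMetrics(scenarioResult, stdout, stderr, redirector));
 Console.WriteLine($"Collected {result.IterationResults.Count} iteration result(s)");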
Code Example #3
        BenchmarkRunResult MeasureIterations(TestRun run, BenchmarkConfiguration config, ITestOutputHelper output)
        {
            // The XunitPerformanceHarness is hardcoded to log to the console. It would be nice if the output were configurable somehow,
            // but in lieu of that we can redirect all console output with light hackery.
            using (var redirector = new ConsoleRedirector(output))
            {
                // XunitPerformanceHarness expects to do the raw command line parsing itself, but I really don't like that its default collection
                // metric requires the use of ETW. Getting an admin console or admin VS instance isn't where most people start; it's
                // a small nuisance, and for these tests it's often not needed and adds non-trivial overhead. I set the default to stopwatch if the
                // perf:collect argument hasn't been specified, but that sadly requires that I pre-parse, interpret, and then re-format all the
                // args to make that change :(
                //
                // In TestRun.ValidateMetricNames() I pre-check whether ETW is going to be needed and give an error there, rather than doing all the
                // test setup (~1 minute?) and then giving the error after the user has probably wandered away. That also relies on some of this
                // replicated command line parsing.
                string[] args = new string[] { "--perf:collect", string.Join("+", run.MetricNames), "--perf:outputdir", run.OutputDir, "--perf:runid", run.BenchviewRunId };
                using (var harness = new XunitPerformanceHarness(args))
                {
                    ProcessStartInfo startInfo = new ProcessStartInfo(run.DotNetInstallation.DotNetExe, ExePath + " " + CommandLineArguments);
                    startInfo.WorkingDirectory       = WorkingDirPath;
                    startInfo.RedirectStandardError  = true;
                    startInfo.RedirectStandardOutput = true;
                    IEnumerable<KeyValuePair<string, string>> extraEnvVars = config.EnvironmentVariables.Concat(EnvironmentVariables).Append(new KeyValuePair<string, string>("DOTNET_MULTILEVEL_LOOKUP", "0"));
                    foreach (KeyValuePair<string, string> kv in extraEnvVars)
                    {
                        startInfo.Environment[kv.Key] = kv.Value;
                    }
                    output.WriteLine("XUnitPerfHarness doesn't log env vars it uses to run processes. To workaround, logging them here:");
                    output.WriteLine(string.Join(", ", extraEnvVars.Select(kv => kv.Key + "=" + kv.Value)));

                    BenchmarkRunResult result = new BenchmarkRunResult(this, config);
                    StringBuilder      stderr = new StringBuilder();
                    StringBuilder      stdout = new StringBuilder();
                    var scenarioConfiguration = new ScenarioTestConfiguration(TimeSpan.FromMinutes(60), startInfo)
                    {
                        // XUnitPerformanceHarness writes files to disk starting with {runid}-{ScenarioBenchmarkName}-{TestName}
                        TestName             = (Name + "-" + config.Name).Replace(' ', '_'),
                        Scenario             = new ScenarioBenchmark("EndToEnd"),
                        Iterations           = run.Iterations,
                        PreIterationDelegate = scenario =>
                        {
                            stderr.Clear();
                            stdout.Clear();
                            scenario.Process.ErrorDataReceived += (object sender, DataReceivedEventArgs errorLine) =>
                            {
                                if (!string.IsNullOrEmpty(errorLine.Data))
                                {
                                    stderr.AppendLine(errorLine.Data);
                                    redirector.WriteLine("STDERROR: " + errorLine.Data);
                                }
                            };
                            scenario.Process.OutputDataReceived += (object sender, DataReceivedEventArgs outputLine) =>
                            {
                                // Mirror the stderr handler: Data is null when the stream closes.
                                if (!string.IsNullOrEmpty(outputLine.Data))
                                {
                                    stdout.AppendLine(outputLine.Data);
                                    redirector.WriteLine(outputLine.Data);
                                }
                            };
                        },
                        PostIterationDelegate = scenarioResult =>
                        {
                            result.IterationResults.Add(RecordIterationMetrics(scenarioResult, stdout.ToString(), stderr.ToString(), redirector));
                        }
                    };
                    harness.RunScenario(scenarioConfiguration, sb => { BenchviewResultExporter.ConvertRunResult(sb, result); });
                    return result;
                }
            }
        }
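
The ConsoleRedirector type used above is not shown in these examples. A minimal sketch of the "light hackery" the comment describes, assuming it works by swapping Console.Out for a writer that forwards lines to xunit's ITestOutputHelper (the real implementation may differ):

        using System;
        using System.IO;
        using System.Text;
        using Xunit.Abstractions;

        // Hypothetical sketch of ConsoleRedirector; not the project's actual type.
        sealed class ConsoleRedirector : IDisposable
        {
            readonly TextWriter _original;
            readonly ITestOutputHelper _output;

            public ConsoleRedirector(ITestOutputHelper output)
            {
                _output   = output;
                _original = Console.Out;
                // Route anything written to Console into the xunit output helper.
                Console.SetOut(new ForwardingWriter(this));
            }

            public void WriteLine(string line) => _output.WriteLine(line ?? string.Empty);

            public void Dispose() => Console.SetOut(_original); // restore the original writer

            // Forwards whole lines only; a production version would also need to
            // buffer partial Write() calls.
            sealed class ForwardingWriter : TextWriter
            {
                readonly ConsoleRedirector _owner;
                public ForwardingWriter(ConsoleRedirector owner) => _owner = owner;
                public override Encoding Encoding => Encoding.UTF8;
                public override void WriteLine(string value) => _owner.WriteLine(value);
            }
        }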