Example #1
        BenchmarkRunResult MeasureIterations(TestRun run, BenchmarkConfiguration config, ITestOutputHelper output)
        {
            // The XunitPerformanceHarness is hardcoded to log to the console. It would be nice if the output were configurable somehow,
            // but in lieu of that we can redirect all console output with light hackery.
            using (var redirector = new ConsoleRedirector(output))
            {
                // XunitPerformanceHarness expects to do the raw command-line parsing itself, but I really don't like that its default collection
                // metric requires the use of ETW. Getting an admin console or admin VS instance isn't where most people start; it's
                // a small nuisance, and for these tests it's often not needed and adds non-trivial overhead. I set the default to stopwatch if the
                // perf:collect argument hasn't been specified, but that sadly requires that I pre-parse, interpret, and then re-format all the
                // args to make that change :(
                //
                // In TestRun.ValidateMetricNames() I pre-check if ETW is going to be needed and give an error there rather than doing all the
                // test setup (~1 minute?) and then giving the error after the user has probably wandered away. That also relies on some of this
                // replicated command line parsing.
                string[] args = new string[] { "--perf:collect", string.Join("+", run.MetricNames), "--perf:outputdir", run.OutputDir, "--perf:runid", run.BenchviewRunId };
                using (var harness = new XunitPerformanceHarness(args))
                {
                    ProcessStartInfo startInfo = new ProcessStartInfo(run.DotNetInstallation.DotNetExe, (ExePath + " " + CommandLineArguments).Trim());
                    startInfo.WorkingDirectory       = WorkingDirPath;
                    startInfo.RedirectStandardError  = true;
                    startInfo.RedirectStandardOutput = true;
                    IEnumerable<KeyValuePair<string, string>> extraEnvVars = config.EnvironmentVariables.Concat(EnvironmentVariables).Append(new KeyValuePair<string, string>("DOTNET_MULTILEVEL_LOOKUP", "0"));
                    foreach (KeyValuePair<string, string> kv in extraEnvVars)
                    {
                        startInfo.Environment[kv.Key] = kv.Value;
                    }
                    output.WriteLine("XUnitPerfHarness doesn't log env vars it uses to run processes. To workaround, logging them here:");
                    output.WriteLine($"Environment variables: {string.Join(", ", extraEnvVars.Select(kv => kv.Key + "=" + kv.Value))}");
                    output.WriteLine($"Working directory: \"{startInfo.WorkingDirectory}\"");
                    output.WriteLine($"Command line: \"{startInfo.FileName}\" {startInfo.Arguments}");

                    BenchmarkRunResult result = new BenchmarkRunResult(this, config);
                    StringBuilder      stderr = new StringBuilder();
                    StringBuilder      stdout = new StringBuilder();
                    var scenarioConfiguration = new ScenarioTestConfiguration(TimeSpan.FromMinutes(60), startInfo)
                    {
                        //XUnitPerformanceHarness writes files to disk starting with {runid}-{ScenarioBenchmarkName}-{TestName}
                        TestName             = (Name + "-" + config.Name).Replace(' ', '_'),
                        Scenario             = new ScenarioBenchmark("JitBench"),
                        Iterations           = run.Iterations,
                        PreIterationDelegate = scenario =>
                        {
                            stderr.Clear();
                            stdout.Clear();
                            scenario.Process.ErrorDataReceived += (object sender, DataReceivedEventArgs errorLine) =>
                            {
                                if (!string.IsNullOrEmpty(errorLine.Data))
                                {
                                    stderr.AppendLine(errorLine.Data);
                                    redirector.WriteLine("STDERROR: " + errorLine.Data);
                                }
                            };
                            scenario.Process.OutputDataReceived += (object sender, DataReceivedEventArgs outputLine) =>
                            {
                                // Data is null when the stream closes; guard like the stderr handler above.
                                if (!string.IsNullOrEmpty(outputLine.Data))
                                {
                                    stdout.AppendLine(outputLine.Data);
                                    redirector.WriteLine(outputLine.Data);
                                }
                            };
                        },
                        PostIterationDelegate = scenarioResult =>
                        {
                            result.IterationResults.Add(RecordIterationMetrics(scenarioResult, stdout.ToString(), stderr.ToString(), redirector));
                        }
                    };
                    harness.RunScenario(scenarioConfiguration, sb => { BenchviewResultExporter.ConvertRunResult(sb, result); });
                    return result;
                }
            }
        }
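
The ConsoleRedirector helper used above is not part of this example. A minimal sketch of what such a type might look like, assuming it swaps Console.Out for a writer that forwards completed lines to xunit's ITestOutputHelper and restores the original writer on dispose (needs using System, System.IO, System.Text, and Xunit.Abstractions):

        // Hypothetical sketch, not the actual implementation from the example above.
        sealed class ConsoleRedirector : TextWriter
        {
            private readonly ITestOutputHelper _output;
            private readonly TextWriter        _originalOut;
            private readonly StringBuilder     _line = new StringBuilder();

            public ConsoleRedirector(ITestOutputHelper output)
            {
                _output      = output;
                _originalOut = Console.Out;
                Console.SetOut(this);
            }

            public override Encoding Encoding => Encoding.UTF8;

            // Buffer characters until a newline arrives, then forward the completed line.
            public override void Write(char value)
            {
                if (value == '\n')
                {
                    _output.WriteLine(_line.ToString().TrimEnd('\r'));
                    _line.Clear();
                }
                else
                {
                    _line.Append(value);
                }
            }

            public override void WriteLine(string value)
            {
                _output.WriteLine(value ?? string.Empty);
            }

            protected override void Dispose(bool disposing)
            {
                // Restore the original console writer so later tests are unaffected.
                Console.SetOut(_originalOut);
                base.Dispose(disposing);
            }
        }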
Example #2
        public static int Main(string[] args)
        {
            var options = BenchmarkOptions.Parse(args);

            bool benchmarkSpecified = false;

            foreach (Benchmark benchmark in Benchmarks)
            {
                if (options.BenchmarkNames.Contains(benchmark.Name, StringComparer.OrdinalIgnoreCase))
                {
                    benchmark.SetToRun();
                    benchmarkSpecified = true;
                    break;
                }
            }

            var    arguments = new List<string>();
            string runId     = "";

            for (int i = 0; i < args.Length; i++)
            {
                if (string.Equals(args[i], "--perf:runid", StringComparison.OrdinalIgnoreCase))
                {
                    if (i + 1 < args.Length)
                    {
                        runId = args[++i];
                    }
                    else
                    {
                        Console.WriteLine("Missing --perf:runid");
                        Console.WriteLine(BenchmarkOptions.Usage());
                        Environment.Exit(1);
                    }
                }
                else
                {
                    arguments.Add(args[i]);
                }
            }

            // Workspace is the ROOT of the coreclr tree.
            // If CORECLR_REPO is not set, the script assumes that the sandbox is located at
            // <path>\coreclr\sandbox.
            LinkBenchRoot = Directory.GetCurrentDirectory();
            Workspace     = Environment.GetEnvironmentVariable("CORECLR_REPO");
            if (Workspace == null)
            {
                // Directory.GetParent returns null at a drive root, so use ?. here;
                // otherwise the null check below could never fire.
                Workspace = Directory.GetParent(LinkBenchRoot)?.FullName;
            }
            if (Workspace == null)
            {
                Console.WriteLine("CORECLR_REPO not found");
                return -1;
            }

            string linkBenchSrcDir = Workspace + "\\tests\\src\\performance\\linkbench\\";

            ScriptDir = linkBenchSrcDir + "scripts\\";
            AssetsDir = linkBenchSrcDir + "assets\\";

            Environment.SetEnvironmentVariable("LinkBenchRoot", LinkBenchRoot);
            Environment.SetEnvironmentVariable("__dotnet", LinkBenchRoot + "\\.Net\\dotnet.exe");
            Environment.SetEnvironmentVariable("__dotnet2", LinkBenchRoot + "\\.Net2\\dotnet.exe");
            PrintHeader("Update the build files to facilitate the link step.");
            if (options.DoSetup)
            {
                PrintHeader("Clone the benchmarks.");
                using (var setup = new Process())
                {
                    setup.StartInfo.FileName = ScriptDir + "clone.cmd";
                    setup.Start();
                    setup.WaitForExit();
                    if (setup.ExitCode != 0)
                    {
                        PrintHeader("Benchmark Setup failed");
                        return -2;
                    }
                }

                PrintHeader("Setup the benchmarks.");
                foreach (Benchmark benchmark in Benchmarks)
                {
                    if (benchmark.doRun && benchmark.Setup != null)
                    {
                        benchmark.Setup();
                    }
                }
            }

            if (options.DoBuild)
            {
                // Run the setup Script, which clones, builds and links the benchmarks.
                using (var setup = new Process())
                {
                    setup.StartInfo.FileName  = ScriptDir + "build.cmd";
                    setup.StartInfo.Arguments = AssetsDir;
                    setup.Start();
                    setup.WaitForExit();
                    if (setup.ExitCode != 0)
                    {
                        Console.WriteLine("Benchmark build failed");
                        return -3;
                    }
                }
            }

            // Since this is a size-measurement scenario, there are no iterations
            // to perform, so create a process that does nothing to satisfy XUnit.
            // All size measurements are performed in PostRun().
            var emptyCmd = new ProcessStartInfo()
            {
                FileName = ScriptDir + "empty.cmd"
            };

            for (int i = 0; i < Benchmarks.Length; i++)
            {
                CurrentBenchmark = Benchmarks[i];
                if (!CurrentBenchmark.doRun)
                {
                    continue;
                }

                var newArgs = new List<string>(arguments);
                newArgs.AddRange(new[] { "--perf:runid", $"{runId}-{CurrentBenchmark.Name}", });
                Console.WriteLine($"{string.Join(" ", newArgs)}");
                using (var h = new XunitPerformanceHarness(newArgs.ToArray()))
                {
                    // 2,000,000 ticks == 200 ms; the placeholder process exits immediately.
                    var configuration = new ScenarioTestConfiguration(new TimeSpan(2000000), emptyCmd)
                    {
                        Scenario = new ScenarioBenchmark(CurrentBenchmark.Name)
                        {
                            Namespace = "LinkBench"
                        },
                    };
                    h.RunScenario(configuration, PostRun);
                }
            }

            return 0;
        }
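
The PostRun delegate passed to h.RunScenario above is not shown. Since LinkBench records sizes rather than timings, one plausible shape, built from the same model types used in Example #4 (ScenarioTestModel, MetricModel, IterationModel), is sketched below; GetResultDir() is a hypothetical helper, and the usual System.IO / System.Linq / System.Collections.Generic usings are assumed:

        // Hypothetical sketch: attach a single "Size" measurement for the current
        // benchmark once the scenario completes. GetResultDir() is assumed.
        private static void PostRun(ScenarioBenchmark scenario)
        {
            var test = new ScenarioTestModel(CurrentBenchmark.Name);
            test.Performance.Metrics.Add(new MetricModel
            {
                Name        = "Size",
                DisplayName = "File Size",
                Unit        = "bytes"
            });

            // Sum the sizes of the produced binaries.
            long totalBytes = new DirectoryInfo(GetResultDir())
                              .EnumerateFiles("*", SearchOption.AllDirectories)
                              .Sum(f => f.Length);

            var iteration = new IterationModel
            {
                Iteration = new Dictionary<string, double>()
            };
            iteration.Iteration.Add(test.Performance.Metrics[0].Name, totalBytes);
            test.Performance.IterationModels.Add(iteration);

            scenario.Tests.Add(test);
        }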
Example #3
        static void Main(string[] args)
        {
            // UseSharedCompilation is set to false to keep VBCSCompiler.exe from hanging around
            // after the performance run has finished and blocking deletion of the folder.
            Environment.SetEnvironmentVariable("UseSharedCompilation", "false");

            var options = JitBenchHarnessOptions.Parse(args);

            SetupStatics(options);

            using (var h = new XunitPerformanceHarness(args))
            {
                ProcessStartInfo startInfo = options.UseExistingSetup ? UseExistingSetup() : CreateNewSetup();

                string scenarioName = "MusicStore";
                if (!startInfo.Environment.ContainsKey("DOTNET_MULTILEVEL_LOOKUP"))
                {
                    throw new InvalidOperationException("DOTNET_MULTILEVEL_LOOKUP was not defined.");
                }
                if (startInfo.Environment["DOTNET_MULTILEVEL_LOOKUP"] != "0")
                {
                    throw new InvalidOperationException("DOTNET_MULTILEVEL_LOOKUP was not set to 0.");
                }

                if (options.EnableTiering)
                {
                    startInfo.Environment.Add("COMPlus_EXPERIMENTAL_TieredCompilation", "1");
                    scenarioName += " Tiering";
                }
                if (options.Minopts)
                {
                    startInfo.Environment.Add("COMPlus_JITMinOpts", "1");
                    scenarioName += " Minopts";
                }

                if (options.DisableR2R)
                {
                    startInfo.Environment.Add("COMPlus_ReadyToRun", "0");
                    scenarioName += " NoR2R";
                }

                if (options.DisableNgen)
                {
                    startInfo.Environment.Add("COMPlus_ZapDisable", "1");
                    scenarioName += " NoNgen";
                }

                PrintHeader($"Running scenario '{scenarioName}'");

                var program = new JitBenchHarness();
                try
                {
                    var scenarioConfiguration = new ScenarioTestConfiguration(TimeSpan.FromMinutes(1), startInfo)
                    {
                        Iterations            = (int)options.Iterations,
                        PreIterationDelegate  = program.PreIteration,
                        PostIterationDelegate = program.PostIteration,
                        Scenario = new ScenarioBenchmark("JitBench"),
                    };
                    var processesOfInterest = new string[] {
                        "dotnet.exe",
                    };
                    var modulesOfInterest = new string[] {
                        "Anonymously Hosted DynamicMethods Assembly",
                        "clrjit.dll",
                        "coreclr.dll",
                        "dotnet.exe",
                        "MusicStore.dll",
                        "ntoskrnl.exe",
                        "System.Private.CoreLib.dll",
                        "Unknown",
                    };

                    if (!File.Exists(startInfo.FileName))
                    {
                        // FileNotFoundException's first ctor argument is the message, so include the path explicitly.
                        throw new FileNotFoundException($"File not found: {startInfo.FileName}", startInfo.FileName);
                    }
                    if (!Directory.Exists(startInfo.WorkingDirectory))
                    {
                        throw new DirectoryNotFoundException($"Directory not found: {startInfo.WorkingDirectory}");
                    }

                    h.RunScenario(scenarioConfiguration, teardownDelegate: (ScenarioBenchmark scenarioBenchmark) =>
                    {
                        program.PostRun(scenarioBenchmark, "MusicStore", processesOfInterest, modulesOfInterest);
                    });
                }
                catch
                {
                    Console.WriteLine(program.StandardOutput);
                    Console.WriteLine(program.StandardError);
                    throw;
                }
            }
        }
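
UseExistingSetup() and CreateNewSetup() are not shown, but the checks above dictate what they must produce: a ProcessStartInfo whose environment pins DOTNET_MULTILEVEL_LOOKUP to "0" so the .NET host probes only the private installation under test. A minimal sketch of that part, with hypothetical parameters:

        // Hypothetical sketch of the piece of CreateNewSetup() the validation above relies on.
        private static ProcessStartInfo CreateStartInfo(string dotnetExe, string appArguments, string workingDir)
        {
            var startInfo = new ProcessStartInfo(dotnetExe, appArguments)
            {
                WorkingDirectory       = workingDir,
                RedirectStandardOutput = true,
                RedirectStandardError  = true,
            };
            // Keep the host from probing machine-wide installations; Main throws
            // if this variable is missing or set to anything other than "0".
            startInfo.Environment["DOTNET_MULTILEVEL_LOOKUP"] = "0";
            return startInfo;
        }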
Example #4
        private static void TestDir(XunitPerformanceHarness harness)
        {
            string commandName = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "dir" : "ls";

            var testModel = new ScenarioTestModel(commandName);

            testModel.Performance.Metrics.Add(new MetricModel
            {
                Name        = "ExecutionTime",
                DisplayName = "Execution Time",
                Unit        = "ms"
            });

            void PreIteration(ScenarioTest scenarioTest)
            {
            }

            void PostIteration(ScenarioExecutionResult scenarioExecutionResult)
            {
                var elapsed = scenarioExecutionResult.ProcessExitInfo.ExitTime - scenarioExecutionResult.ProcessExitInfo.StartTime;

                var iteration = new IterationModel
                {
                    Iteration = new Dictionary<string, double>()
                };

                iteration.Iteration.Add(testModel.Performance.Metrics[0].Name, elapsed.TotalMilliseconds);
                testModel.Performance.IterationModels.Add(iteration);
            }

            void PostRun(ScenarioBenchmark scenario)
            {
            }

            ProcessStartInfo processToMeasure;

            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                processToMeasure = new ProcessStartInfo("cmd.exe", $"/c {commandName}");
            }
            else
            {
                processToMeasure = new ProcessStartInfo(commandName);
            }

            processToMeasure.RedirectStandardError  = true;
            processToMeasure.RedirectStandardOutput = true;

            var scenarioTestConfiguration = new ScenarioTestConfiguration(Timeout, processToMeasure)
            {
                Iterations            = Iterations,
                PreIterationDelegate  = PreIteration,
                PostIterationDelegate = PostIteration,
                Scenario = new ScenarioBenchmark("ExecuteCommand")
            };

            scenarioTestConfiguration.Scenario.Tests.Add(testModel);
            scenarioTestConfiguration.TestName = commandName;

            harness.RunScenario(scenarioTestConfiguration, PostRun);
        }
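
TestDir expects an already-constructed harness. For context, a minimal driver might look like the sketch below, assuming Timeout and Iterations (referenced above) are static fields defined elsewhere on the same class:

        // Hypothetical sketch: construct the harness from the command line and run the scenario.
        public static void Main(string[] args)
        {
            using (var harness = new XunitPerformanceHarness(args))
            {
                TestDir(harness);
            }
        }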
Example #5
        public void Run([CallerMemberName] string callerName = null)
        {
            //  Handle the case where we're running inside VS (or via dotnet test), so the Main method hasn't run to initialize the perf harness.
            //  In the future we may want to do this via an xUnit fixture, which would also let us call the Dispose method to write the results
            //  afterwards.
            if (_performanceHarness == null)
            {
                Program.HandlePerfArgs(new List<string>()
                {
                    "--iterations",
                    "1"
                });
            }

            TestName = TestName ?? callerName;
            int currentIteration  = 0;
            var durationTestModel = new ScenarioTestModel(TestName);

            durationTestModel.Performance.Metrics.Add(new MetricModel
            {
                Name        = "ExecutionTime",
                DisplayName = "Execution Time",
                Unit        = "ms"
            });

            string testIdentifier = _performanceHarness.Configuration.RunId + "-" + ScenarioName + " - " + TestName;

            string testResultsFolder = Path.Combine(_performanceHarness.OutputDirectory, testIdentifier + "-traces");

            if (!Directory.Exists(testResultsFolder))
            {
                Directory.CreateDirectory(testResultsFolder);
            }

            using (FolderSnapshot snapshot = FolderSnapshot.Create(TestFolder))
            {
                void PreIteration(ScenarioTest scenarioTest)
                {
                    if (currentIteration > 0)
                    {
                        snapshot.Restore();
                    }

                    //  TODO: Optionally kill processes such as MSBuild.exe and VBCSCompiler.exe.
                    //  We should always do this before the first iteration, but whether we do it between
                    //  iterations should be configurable: when testing "warm" / incremental builds, we
                    //  expect the persistent processes to already be running and to have built the project.
                }

                void PostIteration(ScenarioExecutionResult scenarioExecutionResult)
                {
                    var elapsed = scenarioExecutionResult.ProcessExitInfo.ExitTime - scenarioExecutionResult.ProcessExitInfo.StartTime;

                    var durationIteration = new IterationModel
                    {
                        Iteration = new Dictionary<string, double>()
                    };

                    durationIteration.Iteration.Add(durationTestModel.Performance.Metrics[0].Name, elapsed.TotalMilliseconds);
                    durationTestModel.Performance.IterationModels.Add(durationIteration);

                    if (GetPerformanceSummary)
                    {
                        string performanceSummaryFileDestination = Path.Combine(testResultsFolder, $"{testIdentifier}({currentIteration}).txt");
                        File.Move(Path.Combine(TestFolder, "PerformanceSummary.txt"), performanceSummaryFileDestination);
                    }
                    if (GetBinLog)
                    {
                        string binlogDestination = Path.Combine(testResultsFolder, $"{testIdentifier}({currentIteration}).binlog");
                        File.Move(Path.Combine(TestFolder, "msbuild.binlog"), binlogDestination);
                    }

                    currentIteration++;
                }

                void PostRun(ScenarioBenchmark scenario)
                {
                }

                if (GetPerformanceSummary)
                {
                    ProcessToMeasure.Arguments += " /flp9:PerformanceSummary;v=q;logfile=\"" + Path.Combine(TestFolder, "PerformanceSummary.txt") + "\"";
                }
                if (GetBinLog)
                {
                    ProcessToMeasure.Arguments += " /bl:\"" + Path.Combine(TestFolder, "msbuild.binlog") + "\"";
                }

                var scenarioTestConfiguration = new ScenarioTestConfiguration(Timeout, ProcessToMeasure);
                scenarioTestConfiguration.Iterations            = NumberOfIterations;
                scenarioTestConfiguration.PreIterationDelegate  = PreIteration;
                scenarioTestConfiguration.PostIterationDelegate = PostIteration;
                scenarioTestConfiguration.SaveResults           = false;
                scenarioTestConfiguration.Scenario = GetScenarioBenchmark(ScenarioName ?? TestName);
                scenarioTestConfiguration.Scenario.Tests.Add(durationTestModel);
                scenarioTestConfiguration.TestName = TestName;

                _performanceHarness.RunScenario(scenarioTestConfiguration, PostRun);
            }
        }
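
FolderSnapshot is not shown above. One plausible implementation copies the folder to a temporary location on Create and copies it back on Restore, so every iteration starts from identical on-disk state. A sketch under that assumption (Path.GetRelativePath requires .NET Core 2.0 or later):

        // Hypothetical sketch, not the actual implementation used by the example.
        sealed class FolderSnapshot : IDisposable
        {
            private readonly string _source;
            private readonly string _backup;

            private FolderSnapshot(string source, string backup)
            {
                _source = source;
                _backup = backup;
            }

            public static FolderSnapshot Create(string folder)
            {
                string backup = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
                CopyTree(folder, backup);
                return new FolderSnapshot(folder, backup);
            }

            public void Restore()
            {
                // Wipe the working folder and bring back the pristine copy.
                Directory.Delete(_source, recursive: true);
                CopyTree(_backup, _source);
            }

            public void Dispose()
            {
                Directory.Delete(_backup, recursive: true);
            }

            private static void CopyTree(string from, string to)
            {
                Directory.CreateDirectory(to);
                foreach (string dir in Directory.EnumerateDirectories(from, "*", SearchOption.AllDirectories))
                {
                    Directory.CreateDirectory(Path.Combine(to, Path.GetRelativePath(from, dir)));
                }
                foreach (string file in Directory.EnumerateFiles(from, "*", SearchOption.AllDirectories))
                {
                    File.Copy(file, Path.Combine(to, Path.GetRelativePath(from, file)), overwrite: true);
                }
            }
        }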