Example #1
        // Builds one table row per (benchmark, primary metric) pair, using the first run result's
        // first iteration to discover which metrics were measured.
        List <ResultTableRowModel> BuildRowModels(Func <Benchmark, Metric, bool> primaryMetricSelector)
        {
            List <ResultTableRowModel> rows = new List <ResultTableRowModel>();

            foreach (Benchmark benchmark in Benchmarks)
            {
                BenchmarkRunResult canonResult = BenchmarkRunResults.Where(r => r.Benchmark == benchmark).FirstOrDefault();
                if (canonResult == null || canonResult.IterationResults == null || canonResult.IterationResults.Count == 0)
                {
                    continue;
                }
                IterationResult canonIteration = canonResult.IterationResults[0];
                foreach (Metric metric in canonIteration.Measurements.Keys)
                {
                    if (primaryMetricSelector(benchmark, metric))
                    {
                        rows.Add(new ResultTableRowModel()
                        {
                            Benchmark = benchmark, Metric = metric
                        });
                    }
                }
            }
            return(rows);
        }
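        // (Not part of the original example) A hypothetical call showing how the selector parameter is used:
        // the delegate decides which metric counts as "primary" for each benchmark, e.g. picking a metric by name.
        // "Duration" below is an assumed metric name, not something these examples define:
        //
        //     List <ResultTableRowModel> rows = BuildRowModels((benchmark, metric) => metric.Name == "Duration");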
        // Collects the distinct set of metrics measured across all iterations of a run.
        static List <Metric> CollectMetrics(BenchmarkRunResult runResult)
        {
            List <Metric> metrics = new List <Metric>();

            foreach (IterationResult iterationResult in runResult.IterationResults)
            {
                foreach (KeyValuePair <Metric, double> measurement in iterationResult.Measurements)
                {
                    if (!metrics.Contains(measurement.Key))
                    {
                        metrics.Add(measurement.Key);
                    }
                }
            }
            return(metrics);
        }
        // Converts a run result into one ScenarioTestModel for the standard metrics, plus one extra
        // model per metric that has custom Benchview reporting.
        static ScenarioTestModel[] ConvertRunResult(BenchmarkRunResult runResult)
        {
            List <ScenarioTestModel> testModels = new List <ScenarioTestModel>();
            string        name    = runResult.Benchmark.Name;
            List <Metric> metrics = CollectMetrics(runResult);

            foreach (Metric m in metrics.ToArray())
            {
                if (runResult.Benchmark.TryGetBenchviewCustomMetricReporting(m, out Metric newMetric, out string newScenarioModelName))
                {
                    metrics.Remove(m);
                    testModels.Add(ConvertRunResult(runResult, new Metric[] { newMetric }, oldMetric => m.Equals(oldMetric) ? newMetric : default(Metric), name, newScenarioModelName));
                }
            }
            testModels.Insert(0, ConvertRunResult(runResult, metrics, oldMetric => metrics.Contains(oldMetric) ? oldMetric : default(Metric), null, name));
            return(testModels.ToArray());
        }
        // Builds a single ScenarioTestModel containing the given metrics and one IterationModel per iteration result.
        static ScenarioTestModel ConvertRunResult(BenchmarkRunResult runResult, IEnumerable <Metric> metrics, Func <Metric, Metric> metricMapping, string scenarioModelNamespace, string scenarioModelName)
        {
            var testModel = new ScenarioTestModel(scenarioModelName);

            testModel.Namespace                   = scenarioModelNamespace;
            testModel.Performance                 = new PerformanceModel();
            testModel.Performance.Metrics         = new List <MetricModel>();
            testModel.Performance.IterationModels = new List <IterationModel>();
            foreach (var iterationResult in runResult.IterationResults)
            {
                testModel.Performance.IterationModels.Add(ConvertIterationResult(iterationResult, metricMapping));
            }
            foreach (var metric in metrics)
            {
                testModel.Performance.Metrics.Add(new MetricModel()
                {
                    DisplayName = metric.Name,
                    Name        = metric.Name,
                    Unit        = metric.Unit
                });
            }
            return(testModel);
        }
        // Populates a ScenarioBenchmark's test list from a single benchmark run result.
        public static void ConvertRunResult(ScenarioBenchmark scenario, BenchmarkRunResult runResult)
        {
            scenario.Tests = new List <ScenarioTestModel>();
            scenario.Tests.AddRange(ConvertRunResult(runResult));
        }
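
A minimal usage sketch of the public entry point above, assuming a completed BenchmarkRunResult named runResult (the variable name is illustrative; Example #6 below makes the same call from its RunScenario callback):

    var scenario = new ScenarioBenchmark("EndToEnd");
    BenchviewResultExporter.ConvertRunResult(scenario, runResult);
    // scenario.Tests now holds one ScenarioTestModel for the standard metrics,
    // plus one per metric that uses custom Benchview reporting.
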
Example #6
        // Runs the benchmark under XunitPerformanceHarness for the configured number of iterations
        // and collects the per-iteration metrics into a BenchmarkRunResult.
        BenchmarkRunResult MeasureIterations(TestRun run, BenchmarkConfiguration config, ITestOutputHelper output)
        {
            // The XunitPerformanceHarness is hardcoded to log to the console. It would be nice if the output were configurable somehow,
            // but in lieu of that we can redirect all console output with light hackery.
            using (var redirector = new ConsoleRedirector(output))
            {
                // XunitPerformanceHarness expects to do the raw command line parsing itself, but I really don't like that its default collection
                // metric requires the use of ETW. Getting an admin console or admin VS instance isn't where most people start; it's
                // a small nuisance, and for these tests it's often not needed and adds non-trivial overhead. I set the default to stopwatch if the
                // perf:collect argument hasn't been specified, but that sadly requires that I pre-parse, interpret, and then re-format all the
                // args to make that change :(
                //
                // In TestRun.ValidateMetricNames() I pre-check whether ETW is going to be needed and give an error there rather than doing all the
                // test setup (~1 minute?) and then giving the error after the user has probably wandered away. That also relies on some of this
                // replicated command line parsing.
                string[] args = new string[] { "--perf:collect", string.Join("+", run.MetricNames), "--perf:outputdir", run.OutputDir, "--perf:runid", run.BenchviewRunId };
                using (var harness = new XunitPerformanceHarness(args))
                {
                    ProcessStartInfo startInfo = new ProcessStartInfo(run.DotNetInstallation.DotNetExe, ExePath + " " + CommandLineArguments);
                    startInfo.WorkingDirectory       = WorkingDirPath;
                    startInfo.RedirectStandardError  = true;
                    startInfo.RedirectStandardOutput = true;
                    IEnumerable <KeyValuePair <string, string> > extraEnvVars = config.EnvironmentVariables.Concat(EnvironmentVariables).Append(new KeyValuePair <string, string>("DOTNET_MULTILEVEL_LOOKUP", "0"));
                    foreach (KeyValuePair <string, string> kv in extraEnvVars)
                    {
                        startInfo.Environment[kv.Key] = kv.Value;
                    }
                    output.WriteLine("XUnitPerfHarness doesn't log env vars it uses to run processes. To workaround, logging them here:");
                    output.WriteLine(string.Join(", ", extraEnvVars.Select(kv => kv.Key + "=" + kv.Value)));

                    BenchmarkRunResult result = new BenchmarkRunResult(this, config);
                    StringBuilder      stderr = new StringBuilder();
                    StringBuilder      stdout = new StringBuilder();
                    var scenarioConfiguration = new ScenarioTestConfiguration(TimeSpan.FromMinutes(60), startInfo)
                    {
                        // XUnitPerformanceHarness writes files to disk starting with {runid}-{ScenarioBenchmarkName}-{TestName}
                        TestName             = (Name + "-" + config.Name).Replace(' ', '_'),
                        Scenario             = new ScenarioBenchmark("EndToEnd"),
                        Iterations           = run.Iterations,
                        PreIterationDelegate = scenario =>
                        {
                            stderr.Clear();
                            stdout.Clear();
                            scenario.Process.ErrorDataReceived += (object sender, DataReceivedEventArgs errorLine) =>
                            {
                                if (!string.IsNullOrEmpty(errorLine.Data))
                                {
                                    stderr.AppendLine(errorLine.Data);
                                    redirector.WriteLine("STDERROR: " + errorLine.Data);
                                }
                            };
                            scenario.Process.OutputDataReceived += (object sender, DataReceivedEventArgs outputLine) =>
                            {
                                stdout.AppendLine(outputLine.Data);
                                redirector.WriteLine(outputLine.Data);
                            };
                        },
                        PostIterationDelegate = scenarioResult =>
                        {
                            result.IterationResults.Add(RecordIterationMetrics(scenarioResult, stdout.ToString(), stderr.ToString(), redirector));
                        }
                    };
                    harness.RunScenario(scenarioConfiguration, sb => { BenchviewResultExporter.ConvertRunResult(sb, result); });
                    return(result);
                }
            }
        }
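
The ConsoleRedirector type used above is not shown in these examples. Below is a minimal sketch of what such a helper could look like, assuming all it needs to do is swap Console.Out for a writer that forwards lines to xunit's ITestOutputHelper; the names and details are illustrative, not the original implementation:

    using System;
    using System.IO;
    using System.Text;
    using Xunit.Abstractions;

    // Illustrative stand-in for the ConsoleRedirector used in MeasureIterations above.
    sealed class ConsoleRedirector : TextWriter
    {
        private readonly ITestOutputHelper _output;
        private readonly TextWriter _originalOut;
        private readonly StringBuilder _line = new StringBuilder();

        public ConsoleRedirector(ITestOutputHelper output)
        {
            _output = output;
            _originalOut = Console.Out;
            Console.SetOut(this); // capture everything written to Console from here on
        }

        public override Encoding Encoding => Encoding.UTF8;

        // Buffer characters until a newline, then emit the completed line.
        public override void Write(char value)
        {
            if (value == '\n')
            {
                Flush();
            }
            else if (value != '\r')
            {
                _line.Append(value);
            }
        }

        public override void WriteLine(string value)
        {
            _line.Append(value);
            Flush();
        }

        public override void Flush()
        {
            _output.WriteLine(_line.ToString());
            _line.Clear();
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                Console.SetOut(_originalOut); // restore the original console writer
            }
            base.Dispose(disposing);
        }
    }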