Code example #1
        private static ScenarioBenchmark PostProcessing()
        {
            PrintHeader("Starting POST");

            var scenarioBenchmark = new ScenarioBenchmark("MusicStore")
            {
                Namespace = "JitBench"
            };

            // Create (measured) test entries for this scenario.
            var startup = new ScenarioTestModel("Startup");

            scenarioBenchmark.Tests.Add(startup);

            var request = new ScenarioTestModel("First Request");

            scenarioBenchmark.Tests.Add(request);

            // TODO: add response time once jit bench is updated to
            // report more reasonable numbers.

            // Add measured metrics to each test.
            startup.Performance.Metrics.Add(new MetricModel {
                Name        = "Duration",
                DisplayName = "Duration",
                Unit        = "ms"
            });
            request.Performance.Metrics.Add(new MetricModel {
                Name        = "Duration",
                DisplayName = "Duration",
                Unit        = "ms"
            });

            for (int i = 0; i < s_iterations; ++i)
            {
                var startupIteration = new IterationModel {
                    Iteration = new Dictionary<string, double>()
                };
                startupIteration.Iteration.Add("Duration", s_startupTimes[i]);
                startup.Performance.IterationModels.Add(startupIteration);

                var requestIteration = new IterationModel {
                    Iteration = new Dictionary<string, double>()
                };
                requestIteration.Iteration.Add("Duration", s_requestTimes[i]);
                request.Performance.IterationModels.Add(requestIteration);
            }

            return scenarioBenchmark;
        }
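
This method reads several static fields that the page does not show. A minimal sketch of the declarations it presumably relies on (the types are inferred from the usage above; the real fields live elsewhere in the harness):

        // Inferred from usage; the actual declarations are not shown on this page.
        private static int      s_iterations;    // number of measured iterations
        private static double[] s_startupTimes;  // per-iteration startup durations, in ms
        private static double[] s_requestTimes;  // per-iteration first-request durations, in ms
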
Code example #2
File: linkbench.cs Project: zodsoft/coreclr
        private static void addMeasurement(ref ScenarioBenchmark scenario, string name, MetricModel metric, double value)
        {
            var iteration = new IterationModel
            {
                Iteration = new Dictionary<string, double>()
            };

            iteration.Iteration.Add(metric.Name, value);

            var size = new ScenarioTestModel(name);

            size.Performance.Metrics.Add(metric);
            size.Performance.IterationModels.Add(iteration);
            scenario.Tests.Add(size);
        }
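
A hedged usage sketch for this helper: define the metric once, then record one value per named test. The scenario name, metric, and value below are illustrative only:

        var scenario   = new ScenarioBenchmark("LinkBench");
        var sizeMetric = new MetricModel {
            Name        = "Size",       // illustrative; the real linkbench metrics may differ
            DisplayName = "File Size",
            Unit        = "bytes"
        };
        addMeasurement(ref scenario, "HelloWorld", sizeMetric, 1024.0);
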
Code example #3
        static ScenarioTestModel ConvertRunResult(
            BenchmarkRunResult runResult,
            IEnumerable<Metric> metrics,
            Func<Metric, Metric> metricMapping,
            string scenarioModelNamespace,
            string scenarioModelName)
        {
            var testModel = new ScenarioTestModel(scenarioModelName);

            testModel.Namespace                   = scenarioModelNamespace;
            testModel.Performance                 = new PerformanceModel();
            testModel.Performance.Metrics         = new List<MetricModel>();
            testModel.Performance.IterationModels = new List<IterationModel>();
            foreach (var iterationResult in runResult.IterationResults)
            {
                testModel.Performance.IterationModels.Add(ConvertIterationResult(iterationResult, metricMapping));
            }
            foreach (var metric in metrics)
            {
                testModel.Performance.Metrics.Add(new MetricModel()
                {
                    DisplayName = metric.Name,
                    Name        = metric.Name,
                    Unit        = metric.Unit
                });
            }
            return testModel;
        }
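
The metricMapping delegate lets callers rename or rescale metrics before conversion; passing an identity function reports them unchanged. A minimal, hypothetical call site (runResult and metrics are placeholders produced elsewhere):

        ScenarioTestModel model = ConvertRunResult(
            runResult,          // a BenchmarkRunResult produced by an earlier run (placeholder)
            metrics,            // the metrics measured for that run (placeholder)
            m => m,             // identity mapping: keep each metric as-is
            "MyNamespace",      // illustrative namespace
            "MyScenario");      // illustrative test name
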
Code example #4
File: JitBenchHarness.cs Project: Drawaes/coreclr
        private static ScenarioBenchmark AddEtwData(
            ScenarioBenchmark scenarioBenchmark,
            ScenarioExecutionResult scenarioExecutionResult,
            IReadOnlyCollection<string> processesOfInterest,
            IReadOnlyCollection<string> modulesOfInterest)
        {
            var metricModels = scenarioExecutionResult.PerformanceMonitorCounters
                               .Select(pmc => new MetricModel {
                DisplayName = pmc.DisplayName,
                Name        = pmc.Name,
                Unit        = pmc.Unit,
            });

            // Get the list of processes of interest.
            Console.WriteLine($"Parsing: {scenarioExecutionResult.EventLogFileName}");
            var processes = new SimpleTraceEventParser().GetProfileData(scenarioExecutionResult);

            // Extract the Pmc data for each one of the processes.
            foreach (var process in processes)
            {
                if (!processesOfInterest.Any(p => p.Equals(process.Name, StringComparison.OrdinalIgnoreCase)))
                {
                    continue;
                }

                var processTest = scenarioBenchmark.Tests
                                  .SingleOrDefault(t => t.Name == process.Name && t.Namespace == "");
                if (processTest == null)
                {
                    processTest = new ScenarioTestModel(process.Name)
                    {
                        Namespace = "",
                    };
                    scenarioBenchmark.Tests.Add(processTest);

                    // Add metrics definitions.
                    processTest.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                    processTest.Performance.Metrics.AddRange(metricModels);
                }

                var processIterationModel = new IterationModel {
                    Iteration = new Dictionary<string, double>()
                };
                processTest.Performance.IterationModels.Add(processIterationModel);

                processIterationModel.Iteration.Add(
                    ElapsedTimeMilliseconds.Name, process.LifeSpan.Duration.TotalMilliseconds);

                // Add process metrics values.
                foreach (var pmcData in process.PerformanceMonitorCounterData)
                {
                    processIterationModel.Iteration.Add(pmcData.Key.Name, pmcData.Value);
                }

                foreach (var module in process.Modules)
                {
                    var moduleName = Path.GetFileName(module.FullName);
                    if (modulesOfInterest.Any(m => m.Equals(moduleName, StringComparison.OrdinalIgnoreCase)))
                    {
                        var moduleTestName = moduleName;
                        var moduleTest     = scenarioBenchmark.Tests
                                             .SingleOrDefault(t => t.Name == moduleTestName && t.Namespace == process.Name);

                        if (moduleTest == null)
                        {
                            moduleTest = new ScenarioTestModel(moduleTestName)
                            {
                                Namespace = process.Name,
                                Separator = "!",
                            };
                            scenarioBenchmark.Tests.Add(moduleTest);

                            // Add metrics definitions.
                            moduleTest.Performance.Metrics.AddRange(metricModels);
                        }

                        var moduleIterationModel = new IterationModel {
                            Iteration = new Dictionary<string, double>()
                        };
                        moduleTest.Performance.IterationModels.Add(moduleIterationModel);

                        // Add module metrics values.
                        foreach (var pmcData in module.PerformanceMonitorCounterData)
                        {
                            moduleIterationModel.Iteration.Add(pmcData.Key.Name, pmcData.Value);
                        }
                    }
                }
            }

            return scenarioBenchmark;
        }
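
Both this method and PostRun below reference a shared ElapsedTimeMilliseconds metric definition that the page omits. A plausible declaration, assuming it follows the MetricModel pattern used in the other examples (the exact strings are assumptions):

        // Assumed declaration; the real field is defined elsewhere in JitBenchHarness.cs.
        private static readonly MetricModel ElapsedTimeMilliseconds = new MetricModel {
            Name        = "ElapsedTime",
            DisplayName = "Elapsed Time",
            Unit        = "ms"
        };
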
Code example #5
File: JitBenchHarness.cs Project: Drawaes/coreclr
        private ScenarioBenchmark PostRun(
            string scenarioTestModelName,
            IReadOnlyCollection<string> processesOfInterest,
            IReadOnlyCollection<string> modulesOfInterest)
        {
            PrintHeader("Post-Processing scenario data.");

            var scenarioBenchmark = new ScenarioBenchmark(_scenarioBenchmarkName);

            foreach (var iter in IterationsData)
            {
                var scenarioExecutionResult = iter.ScenarioExecutionResult;
                var scenarioTestModel       = scenarioBenchmark.Tests
                                              .SingleOrDefault(t => t.Name == scenarioTestModelName);

                if (scenarioTestModel == null)
                {
                    scenarioTestModel = new ScenarioTestModel(scenarioTestModelName);
                    scenarioBenchmark.Tests.Add(scenarioTestModel);

                    // Add measured metrics to each test.
                    scenarioTestModel.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                }

                var elapsed = scenarioExecutionResult.ProcessExitInfo.ExitTime -
                              scenarioExecutionResult.ProcessExitInfo.StartTime;
                scenarioTestModel.Performance.IterationModels.Add(new IterationModel {
                    Iteration = new Dictionary<string, double> {
                        { ElapsedTimeMilliseconds.Name, elapsed.TotalMilliseconds },
                    }
                });

                // Create (measured) test entries for this scenario.
                var startup = scenarioBenchmark.Tests
                              .SingleOrDefault(t => t.Name == "Startup" && t.Namespace == scenarioTestModel.Name);
                if (startup == null)
                {
                    startup = new ScenarioTestModel("Startup")
                    {
                        Namespace = scenarioTestModel.Name,
                    };
                    scenarioBenchmark.Tests.Add(startup);

                    // Add measured metrics to each test.
                    startup.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                }

                var firstRequest = scenarioBenchmark.Tests
                                   .SingleOrDefault(t => t.Name == "First Request" && t.Namespace == scenarioTestModel.Name);
                if (firstRequest == null)
                {
                    firstRequest = new ScenarioTestModel("First Request")
                    {
                        Namespace = scenarioTestModel.Name,
                    };
                    scenarioBenchmark.Tests.Add(firstRequest);

                    // Add measured metrics to each test.
                    firstRequest.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                }

                startup.Performance.IterationModels.Add(new IterationModel {
                    Iteration = new Dictionary<string, double> {
                        { ElapsedTimeMilliseconds.Name, iter.StartupTime },
                    },
                });

                firstRequest.Performance.IterationModels.Add(new IterationModel {
                    Iteration = new Dictionary<string, double> {
                        { ElapsedTimeMilliseconds.Name, iter.FirstRequestTime },
                    },
                });

                if (!string.IsNullOrWhiteSpace(iter.ScenarioExecutionResult.EventLogFileName) &&
                    File.Exists(iter.ScenarioExecutionResult.EventLogFileName))
                {
                    // Adding ETW data.
                    scenarioBenchmark = AddEtwData(
                        scenarioBenchmark, iter.ScenarioExecutionResult, processesOfInterest, modulesOfInterest);
                }
            }

            // Prune tests whose iterations collected no measurements.
            for (int i = scenarioBenchmark.Tests.Count - 1; i >= 0; i--)
            {
                if (scenarioBenchmark.Tests[i].Performance.IterationModels.All(iter => iter.Iteration.Count == 0))
                {
                    scenarioBenchmark.Tests.RemoveAt(i);
                }
            }

            return scenarioBenchmark;
        }
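
The IterationsData element type is not shown on this page. Reconstructed from the members accessed above, each item exposes at least the following (a sketch, not the actual declaration):

        // Reconstructed from usage; the real type in JitBenchHarness.cs may differ.
        private sealed class IterationData
        {
            public ScenarioExecutionResult ScenarioExecutionResult { get; set; }
            public double StartupTime      { get; set; }  // ms, recorded under "Startup"
            public double FirstRequestTime { get; set; }  // ms, recorded under "First Request"
        }
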
Code example #6
File: Program.cs Project: nategraf/xunit-performance
        private static void TestDir(XunitPerformanceHarness harness)
        {
            string commandName = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "dir" : "ls";

            var testModel = new ScenarioTestModel(commandName);

            testModel.Performance.Metrics.Add(new MetricModel
            {
                Name        = "ExecutionTime",
                DisplayName = "Execution Time",
                Unit        = "ms"
            });

            void PreIteration(ScenarioTest scenarioTest)
            {
                // No per-iteration setup is needed for this scenario.
            }

            void PostIteration(ScenarioExecutionResult scenarioExecutionResult)
            {
                var elapsed = scenarioExecutionResult.ProcessExitInfo.ExitTime - scenarioExecutionResult.ProcessExitInfo.StartTime;

                var iteration = new IterationModel
                {
                    Iteration = new Dictionary<string, double>()
                };

                iteration.Iteration.Add(testModel.Performance.Metrics[0].Name, elapsed.TotalMilliseconds);
                testModel.Performance.IterationModels.Add(iteration);
            }

            void PostRun(ScenarioBenchmark scenario)
            {
                // No post-run processing is needed for this scenario.
            }

            ProcessStartInfo processToMeasure;

            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                processToMeasure = new ProcessStartInfo("cmd.exe", $"/c {commandName}");
            }
            else
            {
                processToMeasure = new ProcessStartInfo(commandName);
            }

            processToMeasure.RedirectStandardError  = true;
            processToMeasure.RedirectStandardOutput = true;

            var scenarioTestConfiguration = new ScenarioTestConfiguration(Timeout, processToMeasure)
            {
                Iterations            = Iterations,
                PreIterationDelegate  = PreIteration,
                PostIterationDelegate = PostIteration,
                Scenario = new ScenarioBenchmark("ExecuteCommand")
            };

            scenarioTestConfiguration.Scenario.Tests.Add(testModel);
            scenarioTestConfiguration.TestName = commandName;

            harness.RunScenario(scenarioTestConfiguration, PostRun);
        }
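
TestDir references Timeout and Iterations without declaring them. A minimal sketch of what the enclosing class presumably defines (the concrete values here are illustrative only):

        // Illustrative values; the real constants are declared elsewhere in Program.cs.
        private static readonly TimeSpan Timeout = TimeSpan.FromMinutes(1);
        private const int Iterations = 10;
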
Code example #7
File: PerfTest.cs Project: Fredo-Q/dotnet-sdk
        public void Run([CallerMemberName] string callerName = null)
        {
            //  Handle case where we're running inside VS (or via dotnet test), so the Main method hasn't run to initialize the perf harness
            //  In the future we may want to do this via an xUnit fixture, which would also let us call the dispose method to write the results
            //  afterwards
            if (_performanceHarness == null)
            {
                Program.HandlePerfArgs(new List<string>()
                {
                    "--iterations",
                    "1"
                });
            }

            TestName = TestName ?? callerName;
            int currentIteration  = 0;
            var durationTestModel = new ScenarioTestModel(TestName);

            durationTestModel.Performance.Metrics.Add(new MetricModel
            {
                Name        = "ExecutionTime",
                DisplayName = "Execution Time",
                Unit        = "ms"
            });

            string testIdentifier = _performanceHarness.Configuration.RunId + "-" + ScenarioName + " - " + TestName;

            string testResultsFolder = Path.Combine(_performanceHarness.OutputDirectory, testIdentifier + "-traces");

            if (!Directory.Exists(testResultsFolder))
            {
                Directory.CreateDirectory(testResultsFolder);
            }

            using (FolderSnapshot snapshot = FolderSnapshot.Create(TestFolder))
            {
                void PreIteration(ScenarioTest scenarioTest)
                {
                    if (currentIteration > 0)
                    {
                        snapshot.Restore();
                    }

                    //  TODO: Optionally kill processes such as MSBuild.exe and VBCSCompiler.exe
                    //  We should always do this before the first iteration, but it should be configurable whether we
                    //  do it between iterations.  This is because when testing "warm" / incremental builds, we would
                    //  expect the persistent processes to already be running and have already built the project
                }

                void PostIteration(ScenarioExecutionResult scenarioExecutionResult)
                {
                    var elapsed = scenarioExecutionResult.ProcessExitInfo.ExitTime - scenarioExecutionResult.ProcessExitInfo.StartTime;

                    var durationIteration = new IterationModel
                    {
                        Iteration = new Dictionary<string, double>()
                    };

                    durationIteration.Iteration.Add(durationTestModel.Performance.Metrics[0].Name, elapsed.TotalMilliseconds);
                    durationTestModel.Performance.IterationModels.Add(durationIteration);

                    if (GetPerformanceSummary)
                    {
                        string performanceSummaryFileDestination = Path.Combine(testResultsFolder, $"{testIdentifier}({currentIteration}).txt");
                        File.Move(Path.Combine(TestFolder, "PerformanceSummary.txt"), performanceSummaryFileDestination);
                    }
                    if (GetBinLog)
                    {
                        string binlogDestination = Path.Combine(testResultsFolder, $"{testIdentifier}({currentIteration}).binlog");
                        File.Move(Path.Combine(TestFolder, "msbuild.binlog"), binlogDestination);
                    }

                    currentIteration++;
                }

                void PostRun(ScenarioBenchmark scenario)
                {
                    // No post-run processing is needed; SaveResults is disabled below.
                }

                if (GetPerformanceSummary)
                {
                    ProcessToMeasure.Arguments += " /flp9:PerformanceSummary;v=q;logfile=\"" + Path.Combine(TestFolder, "PerformanceSummary.txt") + "\"";
                }
                if (GetBinLog)
                {
                    ProcessToMeasure.Arguments += " /bl:\"" + Path.Combine(TestFolder, "msbuild.binlog") + "\"";
                }

                var scenarioTestConfiguration = new ScenarioTestConfiguration(Timeout, ProcessToMeasure);
                scenarioTestConfiguration.Iterations            = NumberOfIterations;
                scenarioTestConfiguration.PreIterationDelegate  = PreIteration;
                scenarioTestConfiguration.PostIterationDelegate = PostIteration;
                scenarioTestConfiguration.SaveResults           = false;
                scenarioTestConfiguration.Scenario = GetScenarioBenchmark(ScenarioName ?? TestName);
                scenarioTestConfiguration.Scenario.Tests.Add(durationTestModel);
                scenarioTestConfiguration.TestName = TestName;

                _performanceHarness.RunScenario(scenarioTestConfiguration, PostRun);
            }
        }
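
Run calls a GetScenarioBenchmark helper that this page omits. Given how the other examples construct ScenarioBenchmark, a plausible sketch (hypothetical; the real dotnet-sdk implementation may set additional properties):

        // Hypothetical reconstruction; the actual helper in PerfTest.cs is not shown here.
        private ScenarioBenchmark GetScenarioBenchmark(string name)
        {
            return new ScenarioBenchmark(name);
        }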