Example #1
0
        // Deserializes a ScenarioBenchmark XML document from the given stream and
        // converts each scenario test into a Test entry carrying one default
        // ExecutionTime counter with all iteration results.
        public static List <Test> GenerateTestsFromXml(StreamReader stream)
        {
            var serializer = new XmlSerializer(typeof(ScenarioBenchmark));
            var scenarioBenchmark = (ScenarioBenchmark)serializer.Deserialize(stream);

            Console.WriteLine(scenarioBenchmark.Name);

            var tests = new List<Test>();
            foreach (ScenarioBenchmarkTest scenarioTest in scenarioBenchmark.Tests)
            {
                var executionTime = scenarioTest.Performance.metrics.ExecutionTime;

                var test = new Test();
                test.Categories.Add("DotnetCoreSdk");
                test.Name = scenarioBenchmark.Name + "." + scenarioTest.Name;
                test.Counters.Add(new Counter
                {
                    Name           = executionTime.displayName,
                    TopCounter     = true,
                    DefaultCounter = true,
                    HigherIsBetter = false,
                    // NOTE(review): MetricName is populated from the metric's *unit*
                    // (e.g. "ms") — looks intentional here, but confirm against the
                    // Counter consumer.
                    MetricName     = executionTime.unit,
                    Results        = scenarioTest.Performance
                                                 .iterations
                                                 .Select(i => decimal.ToDouble(i.ExecutionTime))
                                                 .ToList()
                });

                tests.Add(test);
            }

            return tests;
        }
Example #2
0
        // Returns the cached ScenarioBenchmark for the given name, creating and
        // caching a new one on first request (simple memoization over _scenarios).
        static ScenarioBenchmark GetScenarioBenchmark(string name)
        {
            ScenarioBenchmark scenario;

            if (!_scenarios.TryGetValue(name, out scenario))
            {
                // First time we see this name: create and remember it.
                scenario         = new ScenarioBenchmark(name);
                _scenarios[name] = scenario;
            }

            return scenario;
        }
Example #3
0
        // Records the computed size/reduction measurements of the current benchmark
        // onto the given scenario (one single-iteration test per measurement).
        private static void PostRun(ScenarioBenchmark scenario)
        {
            // The XUnit output doesn't print the benchmark name, so print it now.
            Console.WriteLine("{0}", CurrentBenchmark.Name);

            // Finalize the size/reduction numbers before recording them.
            CurrentBenchmark.Compute();

            addMeasurement(ref scenario, "MSIL Unlinked", SizeMetric, CurrentBenchmark.UnlinkedMsilSize);
            addMeasurement(ref scenario, "MSIL Linked", SizeMetric, CurrentBenchmark.LinkedMsilSize);
            addMeasurement(ref scenario, "MSIL Reduction", PercMetric, CurrentBenchmark.MsilSizeReduction);
            // Fixed typo: was "Total Uninked". NOTE(review): if downstream dashboards
            // key on the old misspelled name, this rename needs a matching update there.
            addMeasurement(ref scenario, "Total Unlinked", SizeMetric, CurrentBenchmark.UnlinkedDirSize);
            addMeasurement(ref scenario, "Total Linked", SizeMetric, CurrentBenchmark.LinkedDirSize);
            addMeasurement(ref scenario, "Total Reduction", PercMetric, CurrentBenchmark.DirSizeReduction);
        }
Example #4
0
        // Builds the "MusicStore" ScenarioBenchmark from the collected per-iteration
        // startup and first-request timings (s_startupTimes / s_requestTimes).
        // Returns the populated benchmark ready for serialization.
        private static ScenarioBenchmark PostProcessing()
        {
            PrintHeader("Starting POST");

            var scenarioBenchmark = new ScenarioBenchmark("MusicStore")
            {
                Namespace = "JitBench"
            };

            // Create (measured) test entries for this scenario.
            var startup = new ScenarioTestModel("Startup");
            scenarioBenchmark.Tests.Add(startup);

            var request = new ScenarioTestModel("First Request");
            scenarioBenchmark.Tests.Add(request);

            // TODO: add response time once jit bench is updated to
            // report more reasonable numbers.

            // Both tests measure a single "Duration" metric, in milliseconds.
            startup.Performance.Metrics.Add(CreateDurationMetric());
            request.Performance.Metrics.Add(CreateDurationMetric());

            // Record one iteration per measured run for each test.
            for (int i = 0; i < s_iterations; ++i)
            {
                startup.Performance.IterationModels.Add(CreateDurationIteration(s_startupTimes[i]));
                request.Performance.IterationModels.Add(CreateDurationIteration(s_requestTimes[i]));
            }

            return scenarioBenchmark;
        }

        // Creates the "Duration" (ms) metric definition used by the MusicStore tests.
        private static MetricModel CreateDurationMetric()
        {
            return new MetricModel
            {
                Name        = "Duration",
                DisplayName = "Duration",
                Unit        = "ms"
            };
        }

        // Wraps a single measured duration (ms) in an iteration record.
        private static IterationModel CreateDurationIteration(double durationMs)
        {
            return new IterationModel
            {
                Iteration = new Dictionary<string, double> { { "Duration", durationMs } }
            };
        }
Example #5
0
        // Adds a single-metric, single-iteration test entry named `name` to the
        // scenario, recording `value` under the metric's name.
        // NOTE(review): `ref` is not needed for a reference type that is never
        // reassigned, but the signature is kept for compatibility with callers.
        private static void addMeasurement(ref ScenarioBenchmark scenario, string name, MetricModel metric, double value)
        {
            var test = new ScenarioTestModel(name);
            test.Performance.Metrics.Add(metric);

            var iteration = new IterationModel
            {
                Iteration = new Dictionary<string, double> { { metric.Name, value } }
            };
            test.Performance.IterationModels.Add(iteration);

            scenario.Tests.Add(test);
        }
Example #6
0
        // Creates a "LinkBench" ScenarioBenchmark for the current benchmark and
        // records its computed size/reduction measurements. Returns the scenario.
        private static ScenarioBenchmark PostRun()
        {
            // The XUnit output doesn't print the benchmark name, so print it now.
            Console.WriteLine("{0}", CurrentBenchmark.Name);

            var scenario = new ScenarioBenchmark(CurrentBenchmark.Name)
            {
                Namespace = "LinkBench"
            };

            // Finalize the size/reduction numbers before recording them.
            CurrentBenchmark.Compute();

            addMeasurement(ref scenario, "MSIL Unlinked", SizeMetric, CurrentBenchmark.UnlinkedMsilSize);
            addMeasurement(ref scenario, "MSIL Linked", SizeMetric, CurrentBenchmark.LinkedMsilSize);
            addMeasurement(ref scenario, "MSIL Reduction", PercMetric, CurrentBenchmark.MsilSizeReduction);
            // Fixed typo: was "Total Uninked". NOTE(review): if downstream dashboards
            // key on the old misspelled name, this rename needs a matching update there.
            addMeasurement(ref scenario, "Total Unlinked", SizeMetric, CurrentBenchmark.UnlinkedDirSize);
            addMeasurement(ref scenario, "Total Linked", SizeMetric, CurrentBenchmark.LinkedDirSize);
            addMeasurement(ref scenario, "Total Reduction", PercMetric, CurrentBenchmark.DirSizeReduction);

            return scenario;
        }
Example #7
0
        // Parses the ETW trace referenced by scenarioExecutionResult and folds the
        // per-process and per-module performance-monitor-counter data into
        // scenarioBenchmark as test entries. Returns the (mutated) benchmark.
        private static ScenarioBenchmark AddEtwData(
            ScenarioBenchmark scenarioBenchmark,
            ScenarioExecutionResult scenarioExecutionResult,
            IReadOnlyCollection <string> processesOfInterest,
            IReadOnlyCollection <string> modulesOfInterest)
        {
            // Materialize the metric definitions once. The original deferred query
            // was re-enumerated by every AddRange below, allocating a fresh set of
            // MetricModel instances per test. After this change all tests share the
            // same instances — NOTE(review): confirm nothing mutates metric
            // definitions downstream.
            var metricModels = scenarioExecutionResult.PerformanceMonitorCounters
                               .Select(pmc => new MetricModel {
                DisplayName = pmc.DisplayName,
                Name        = pmc.Name,
                Unit        = pmc.Unit,
            })
                               .ToList();

            // Get the list of processes of interest.
            Console.WriteLine($"Parsing: {scenarioExecutionResult.EventLogFileName}");
            var processes = new SimpleTraceEventParser().GetProfileData(scenarioExecutionResult);

            // Extract the Pmc data for each one of the processes.
            foreach (var process in processes)
            {
                if (!processesOfInterest.Any(p => p.Equals(process.Name, StringComparison.OrdinalIgnoreCase)))
                {
                    continue;
                }

                // Get-or-create the test entry for this process (Namespace "" marks
                // process-level entries, as opposed to module-level ones below).
                var processTest = scenarioBenchmark.Tests
                                  .SingleOrDefault(t => t.Name == process.Name && t.Namespace == "");
                if (processTest == null)
                {
                    processTest = new ScenarioTestModel(process.Name)
                    {
                        Namespace = "",
                    };
                    scenarioBenchmark.Tests.Add(processTest);

                    // Add metrics definitions.
                    processTest.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                    processTest.Performance.Metrics.AddRange(metricModels);
                }

                var processIterationModel = new IterationModel {
                    Iteration = new Dictionary <string, double>()
                };
                processTest.Performance.IterationModels.Add(processIterationModel);

                processIterationModel.Iteration.Add(
                    ElapsedTimeMilliseconds.Name, process.LifeSpan.Duration.TotalMilliseconds);

                // Add process metrics values.
                foreach (var pmcData in process.PerformanceMonitorCounterData)
                {
                    processIterationModel.Iteration.Add(pmcData.Key.Name, pmcData.Value);
                }

                foreach (var module in process.Modules)
                {
                    var moduleName = Path.GetFileName(module.FullName);
                    if (modulesOfInterest.Any(m => m.Equals(moduleName, StringComparison.OrdinalIgnoreCase)))
                    {
                        var moduleTestName = $"{moduleName}";
                        // Module entries are namespaced under their owning process.
                        var moduleTest     = scenarioBenchmark.Tests
                                             .SingleOrDefault(t => t.Name == moduleTestName && t.Namespace == process.Name);

                        if (moduleTest == null)
                        {
                            moduleTest = new ScenarioTestModel(moduleTestName)
                            {
                                Namespace = process.Name,
                                Separator = "!",
                            };
                            scenarioBenchmark.Tests.Add(moduleTest);

                            // Add metrics definitions.
                            moduleTest.Performance.Metrics.AddRange(metricModels);
                        }

                        var moduleIterationModel = new IterationModel {
                            Iteration = new Dictionary <string, double>()
                        };
                        moduleTest.Performance.IterationModels.Add(moduleIterationModel);

                        // 5. Add module metrics values.
                        foreach (var pmcData in module.PerformanceMonitorCounterData)
                        {
                            moduleIterationModel.Iteration.Add(pmcData.Key.Name, pmcData.Value);
                        }
                    }
                }
            }

            return(scenarioBenchmark);
        }
Example #8
0
        // Builds a ScenarioBenchmark from the accumulated IterationsData: for each
        // iteration it records total elapsed time, startup time, and first-request
        // time under get-or-create test entries, optionally merges ETW counter data,
        // and finally drops tests whose iterations are all empty.
        // NOTE(review): get-or-create order matters here — the "Startup"/"First
        // Request" lookups are namespaced by scenarioTestModel.Name, so the parent
        // entry must be created first. Left byte-identical; comments only.
        private ScenarioBenchmark PostRun(
            string scenarioTestModelName,
            IReadOnlyCollection <string> processesOfInterest,
            IReadOnlyCollection <string> modulesOfInterest)
        {
            PrintHeader("Post-Processing scenario data.");

            var scenarioBenchmark = new ScenarioBenchmark(_scenarioBenchmarkName);

            foreach (var iter in IterationsData)
            {
                var scenarioExecutionResult = iter.ScenarioExecutionResult;
                // Get-or-create the top-level test entry for this scenario.
                var scenarioTestModel       = scenarioBenchmark.Tests
                                              .SingleOrDefault(t => t.Name == scenarioTestModelName);

                if (scenarioTestModel == null)
                {
                    scenarioTestModel = new ScenarioTestModel(scenarioTestModelName);
                    scenarioBenchmark.Tests.Add(scenarioTestModel);

                    // Add measured metrics to each test.
                    scenarioTestModel.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                }

                // Total wall-clock time of the scenario process for this iteration.
                scenarioTestModel.Performance.IterationModels.Add(new IterationModel {
                    Iteration = new Dictionary <string, double> {
                        { ElapsedTimeMilliseconds.Name, (scenarioExecutionResult.ProcessExitInfo.ExitTime - scenarioExecutionResult.ProcessExitInfo.StartTime).TotalMilliseconds },
                    }
                });

                // Create (measured) test entries for this scenario.
                var startup = scenarioBenchmark.Tests
                              .SingleOrDefault(t => t.Name == "Startup" && t.Namespace == scenarioTestModel.Name);
                if (startup == null)
                {
                    startup = new ScenarioTestModel("Startup")
                    {
                        Namespace = scenarioTestModel.Name,
                    };
                    scenarioBenchmark.Tests.Add(startup);

                    // Add measured metrics to each test.
                    startup.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                }

                var firstRequest = scenarioBenchmark.Tests
                                   .SingleOrDefault(t => t.Name == "First Request" && t.Namespace == scenarioTestModel.Name);
                if (firstRequest == null)
                {
                    firstRequest = new ScenarioTestModel("First Request")
                    {
                        Namespace = scenarioTestModel.Name,
                    };
                    scenarioBenchmark.Tests.Add(firstRequest);

                    // Add measured metrics to each test.
                    firstRequest.Performance.Metrics.Add(ElapsedTimeMilliseconds);
                }

                // Record this iteration's startup and first-request timings.
                startup.Performance.IterationModels.Add(new IterationModel {
                    Iteration = new Dictionary <string, double> {
                        { ElapsedTimeMilliseconds.Name, iter.StartupTime },
                    },
                });

                firstRequest.Performance.IterationModels.Add(new IterationModel {
                    Iteration = new Dictionary <string, double> {
                        { ElapsedTimeMilliseconds.Name, iter.FirstRequestTime },
                    },
                });

                if (!string.IsNullOrWhiteSpace(iter.ScenarioExecutionResult.EventLogFileName) &&
                    File.Exists(iter.ScenarioExecutionResult.EventLogFileName))
                {
                    // Adding ETW data.
                    scenarioBenchmark = AddEtwData(
                        scenarioBenchmark, iter.ScenarioExecutionResult, processesOfInterest, modulesOfInterest);
                }
            }

            // Remove tests that ended up with no measured data at all.
            // Reverse iteration so RemoveAt does not shift unvisited indices.
            for (int i = scenarioBenchmark.Tests.Count - 1; i >= 0; i--)
            {
                if (scenarioBenchmark.Tests[i].Performance.IterationModels.All(iter => iter.Iteration.Count == 0))
                {
                    scenarioBenchmark.Tests.RemoveAt(i);
                }
            }

            return(scenarioBenchmark);
        }
 // Builds "<outputDir>/<runId>-<benchmarkName>" with no file extension;
 // callers append whichever extension they need.
 private static string GetFilePathWithoutExtension(string outputDir, string runId, ScenarioBenchmark benchmark)
 {
     string fileName = $"{runId}-{benchmark.Name}";
     return Path.Combine(outputDir, fileName);
 }
 // Replaces the scenario's test list with the tests converted from this run result.
 public static void ConvertRunResult(ScenarioBenchmark scenario, BenchmarkRunResult runResult)
 {
     var convertedTests = new List <ScenarioTestModel>(ConvertRunResult(runResult));
     scenario.Tests = convertedTests;
 }