Esempio n. 1
0
        /// <summary>
        /// Parses the command line arguments into <paramref name="performanceBenchmark"/>.
        /// Shows usage help (via ShowHelp) when --help is given, when the required
        /// --results option is missing, when unknown options remain, or when parsing throws.
        /// </summary>
        /// <param name="performanceBenchmark">Benchmark configuration populated by the option callbacks.</param>
        /// <param name="args">Raw command line arguments.</param>
        public void ParseOptions(PerformanceBenchmark performanceBenchmark, IEnumerable <string> args)
        {
            var os = GetOptions(performanceBenchmark);

            try
            {
                var remaining = os.Parse(args);

                if (help)
                {
                    ShowHelp(string.Empty, os);
                }

                // --results is required; without at least one result file or directory there is nothing to report on.
                if (!performanceBenchmark.ResultXmlFilePaths.Any() && !performanceBenchmark.ResultXmlDirectoryPaths.Any())
                {
                    ShowHelp("Missing required option --results=(filePath|directoryPath)", os);
                }

                if (remaining.Any())
                {
                    // Fixed: the closing quote belongs after the option name, before the newline
                    // (was "Unknown option: '{0}.\r\n'").
                    var errorMessage = string.Format("Unknown option: '{0}'.\r\n", remaining[0]);
                    ShowHelp(errorMessage, os);
                }
            }
            catch (Exception e)
            {
                ShowHelp(string.Format("Error encountered while parsing option: {0}.\r\n", e.Message), os);
            }
        }
 /// <summary>
 /// Strips the raw field groups for every extra-metadata field from each type's metadata.
 /// Fields listed in extraMetadataExtractFields have their embedded metadata extracted
 /// elsewhere, which leaves the raw, unextracted value meaningless — so it is discarded.
 /// </summary>
 /// <param name="performanceBenchmark">Benchmark whose TestRunMetadataProcessor metadata is pruned.</param>
 public void PerformFinalMetadataUpdate(PerformanceBenchmark performanceBenchmark)
 {
     foreach (var typeMetadata in performanceBenchmark.TestRunMetadataProcessor.TypeMetadata)
     {
         // Removals for each (type, field-name) pair are independent, so iterate types outermost.
         foreach (var extractedFieldName in extraMetadataExtractFields.Keys)
         {
             typeMetadata.FieldGroups.RemoveAll(fg => fg.FieldName.Equals(extractedFieldName));
         }
     }
 }
Esempio n. 3
0
        /// <summary>
        /// Entry point. Parses command line options, loads baseline and result test runs,
        /// aggregates them, and writes the report.
        /// Exits with code 1 when supplied baseline or result files yield no parseable runs.
        /// </summary>
        /// <param name="args">Raw command line arguments forwarded to the options parser.</param>
        private static void Main(string[] args)
        {
            var aggregateTestRunResults           = new List <PerformanceTestRunResult>();
            var baselinePerformanceTestRunResults = new List <PerformanceTestRunResult>();
            var baselineTestResults       = new List <TestResult>();
            var performanceTestRunResults = new List <PerformanceTestRunResult>();
            var testResults          = new List <TestResult>();
            var performanceBenchmark = new PerformanceBenchmark(ExcludedConfigFieldNames);
            var optionsParser        = new OptionsParser();

            optionsParser.ParseOptions(performanceBenchmark, args);
            var testResultXmlParser = new TestResultXmlParser();

            if (performanceBenchmark.BaselineResultFilesExist)
            {
                performanceBenchmark.AddBaselinePerformanceTestRunResults(testResultXmlParser, baselinePerformanceTestRunResults, baselineTestResults);

                if (baselinePerformanceTestRunResults.Any())
                {
                    aggregateTestRunResults.AddRange(baselinePerformanceTestRunResults);
                }
                else
                {
                    // Baseline files were supplied but produced no parseable run results;
                    // emit a diagnostic instead of exiting silently.
                    Console.Error.WriteLine("Failed to parse any baseline performance test run results.");
                    Environment.Exit(1);
                }
            }

            if (performanceBenchmark.ResultFilesExist)
            {
                performanceBenchmark.AddPerformanceTestRunResults(testResultXmlParser, performanceTestRunResults, testResults, baselineTestResults);

                if (performanceTestRunResults.Any())
                {
                    aggregateTestRunResults.AddRange(performanceTestRunResults);
                }
                else
                {
                    Console.Error.WriteLine("Failed to parse any performance test run results.");
                    Environment.Exit(1);
                }
            }

            var reportWriter = new ReportWriter(ExcludedConfigFieldNames);

            reportWriter.WriteReport(
                aggregateTestRunResults,
                performanceBenchmark.MetadataValidator,
                performanceBenchmark.SigFig,
                performanceBenchmark.ReportDirPath,
                performanceBenchmark.BaselineResultFilesExist);
        }
 /// <summary>
 /// Builds the OptionSet describing the command line options this parser accepts.
 /// </summary>
 /// <param name="performanceBenchmark">Benchmark configuration that the option callbacks write into.</param>
 /// <returns>The configured OptionSet.</returns>
 private OptionSet GetOptions(PerformanceBenchmark performanceBenchmark)
 {
     // Fixed typo in the --results help text: "resursively" -> "recursively".
     return(new OptionSet()
            .Add("?|help|h", "Prints out the options.", option => help = option != null)
            .Add("results|testresultsxmlsource=",
                 "REQUIRED - Path to a test result XML filename OR directory. Directories are searched recursively. You can repeat this option with multiple result file or directory paths.",
                 xmlsource => performanceBenchmark.AddXmlSourcePath(xmlsource, "results", ResultType.Test))
            .Add("baseline|baselinexmlsource:", "OPTIONAL - Path to a baseline XML filename.",
                 xmlsource => performanceBenchmark.AddXmlSourcePath(xmlsource, "baseline", ResultType.Baseline))
            .Add("report|reportdirpath:", "OPTIONAL - Path to where the report will be written. Default is current working directory.",
                 performanceBenchmark.AddReportDirPath));
 }
Esempio n. 5
0
        /// <summary>
        /// Builds the OptionSet describing the command line options this parser accepts.
        /// </summary>
        /// <param name="performanceBenchmark">Benchmark configuration that the option callbacks write into.</param>
        /// <returns>The configured OptionSet.</returns>
        private OptionSet GetOptions(PerformanceBenchmark performanceBenchmark)
        {
            var optionsSet = new OptionSet();

            optionsSet.Add("?|help|h", "Prints out the options.", option => help = option != null);
            optionsSet.Add(
                "results|testresultsxmlsource=",
                // Fixed typo in help text: "resursively" -> "recursively".
                "REQUIRED - Path to a test result XML filename OR directory. Directories are searched recursively. You can repeat this option with multiple result file or directory paths.",
                xmlsource => performanceBenchmark.AddXmlSourcePath(xmlsource, "results", ResultType.Test));
            optionsSet.Add(
                "baseline|baselinexmlsource:", "OPTIONAL - Path to a baseline XML filename.",
                xmlsource => performanceBenchmark.AddXmlSourcePath(xmlsource, "baseline", ResultType.Baseline));
            optionsSet.Add(
                "report|reportdirpath:", "OPTIONAL - Path to where the report will be written. Default is current working directory.",
                performanceBenchmark.AddReportDirPath);
            optionsSet.Add("failonbaseline",
                           "Enable return '1' by the reporter if a baseline is passed in and one or more matching configs is out of threshold. Disabled is default. Use option to enable, or use option and append '-' to explicitly disable.",
                           option => performanceBenchmark.FailOnBaseline = option != null);
            return(optionsSet);
        }
        /// <summary>
        /// Builds the OptionSet describing the command line options this parser accepts,
        /// including data-version and file-format selection for results/baseline files.
        /// </summary>
        /// <param name="performanceBenchmark">Benchmark configuration that the option callbacks write into.</param>
        /// <returns>The configured OptionSet.</returns>
        private OptionSet GetOptions(PerformanceBenchmark performanceBenchmark)
        {
            var optionsSet = new OptionSet();

            optionsSet.Add("?|help|h", "Prints out the options.", option => help = option != null);
            optionsSet.Add("dataversion|version=", "Sets Expected Perf Data Version for Results and Baseline Files (1 = V1 2 = V2). Versions of Unity Perf Framework 2.0 or newer will use the V2 data format. If no arg is provided we assume the format is V2", version => performanceBenchmark.SetDataVersion(version));
            // Renamed lambda parameter "filtype" -> "fileType" for readability.
            optionsSet.Add("fileformat|format=", "Sets Expected File Format for Results and Baseline Files. If no arg is provided we assume the format is XML", fileType => performanceBenchmark.SetFileType(fileType));
            optionsSet.Add(
                "results|testresultsxmlsource=",
                // Fixed typo in help text: "resursively" -> "recursively".
                "REQUIRED - Path to a test result XML filename OR directory. Directories are searched recursively. You can repeat this option with multiple result file or directory paths.",
                xmlsource => performanceBenchmark.AddSourcePath(xmlsource, "results", ResultType.Test));
            optionsSet.Add(
                "baseline|baselinexmlsource:", "OPTIONAL - Path to a baseline XML filename.",
                xmlsource => performanceBenchmark.AddSourcePath(xmlsource, "baseline", ResultType.Baseline));
            optionsSet.Add(
                "report|reportdirpath:", "OPTIONAL - Path to where the report will be written. Default is current working directory.",
                performanceBenchmark.AddReportDirPath);
            optionsSet.Add("failonbaseline",
                           "Enable return '1' by the reporter if a baseline is passed in and one or more matching configs is out of threshold. Disabled is default. Use option to enable, or use option and append '-' to explicitly disable.",
                           option => performanceBenchmark.FailOnBaseline = option != null);
            return(optionsSet);
        }
Esempio n. 7
0
        /// <summary>
        /// Entry point. Parses options, loads baseline and result test runs, orders them
        /// for the report (baseline first, remaining runs sorted by result name), and
        /// writes the report. Exits with code 1 when supplied files yield no parseable runs.
        /// </summary>
        /// <param name="args">Raw command line arguments forwarded to the options parser.</param>
        private static void Main(string[] args)
        {
            var aggregateTestRunResults           = new List <PerformanceTestRunResult>();
            var baselinePerformanceTestRunResults = new List <PerformanceTestRunResult>();
            var baselineTestResults       = new List <TestResult>();
            var performanceTestRunResults = new List <PerformanceTestRunResult>();
            var testResults          = new List <TestResult>();
            var performanceBenchmark = new PerformanceBenchmark(ExcludedConfigFieldNames);
            var optionsParser        = new OptionsParser();

            optionsParser.ParseOptions(performanceBenchmark, args);
            var testResultXmlParser = new TestResultXmlParser();

            if (performanceBenchmark.BaselineResultFilesExist)
            {
                performanceBenchmark.AddBaselinePerformanceTestRunResults(testResultXmlParser, baselinePerformanceTestRunResults, baselineTestResults);

                if (baselinePerformanceTestRunResults.Any())
                {
                    aggregateTestRunResults.AddRange(baselinePerformanceTestRunResults);
                }
                else
                {
                    // Emit a diagnostic instead of exiting silently.
                    Console.Error.WriteLine("Failed to parse any baseline performance test run results.");
                    Environment.Exit(1);
                }
            }

            if (performanceBenchmark.ResultFilesExist)
            {
                performanceBenchmark.AddPerformanceTestRunResults(testResultXmlParser, performanceTestRunResults, testResults, baselineTestResults);

                if (performanceTestRunResults.Any())
                {
                    aggregateTestRunResults.AddRange(performanceTestRunResults);
                }
                else
                {
                    Console.Error.WriteLine("Failed to parse any performance test run results.");
                    Environment.Exit(1);
                }
            }

            // Order the report: the baseline run (if any) first so it is displayed first,
            // then the remaining runs sorted by result name.
            var orderedTestRunResults = new List <PerformanceTestRunResult>();

            if (aggregateTestRunResults.Any(a => a.IsBaseline))
            {
                orderedTestRunResults.Add(aggregateTestRunResults.First(a => a.IsBaseline));
            }

            var nonBaselineTestRunResults = aggregateTestRunResults.Where(a => !a.IsBaseline).ToList();

            nonBaselineTestRunResults.Sort((run1, run2) => string.Compare(run1.ResultName, run2.ResultName, StringComparison.Ordinal));
            orderedTestRunResults.AddRange(nonBaselineTestRunResults);

            // Materialize once instead of Array.Resize-ing per element (O(n^2) copying).
            var performanceTestResults = orderedTestRunResults.ToArray();

            var reportWriter = new ReportWriter(performanceBenchmark.TestRunMetadataProcessor);

            reportWriter.WriteReport(
                performanceTestResults,
                performanceBenchmark.SigFig,
                performanceBenchmark.ReportDirPath,
                performanceBenchmark.ReportHtmlName,
                performanceBenchmark.BaselineResultFilesExist);
        }
Esempio n. 8
0
        /// <summary>
        /// Entry point. Parses options, loads baseline and result test runs, orders them
        /// for the report (baseline first, remaining runs sorted by result name), writes
        /// the report, and returns a process exit code.
        /// Exits with code 1 when supplied files yield no parseable runs.
        /// </summary>
        /// <param name="args">Raw command line arguments forwarded to the options parser.</param>
        /// <returns>1 when FailOnBaseline is set and failed tests exist; otherwise 0.</returns>
        private static int Main(string[] args)
        {
            var aggregateTestRunResults           = new List <PerformanceTestRunResult>();
            var baselinePerformanceTestRunResults = new List <PerformanceTestRunResult>();
            var baselineTestResults       = new List <TestResult>();
            var performanceTestRunResults = new List <PerformanceTestRunResult>();
            var testResults          = new List <TestResult>();
            var performanceBenchmark = new PerformanceBenchmark(ExcludedConfigFieldNames);
            var optionsParser        = new OptionsParser();

            optionsParser.ParseOptions(performanceBenchmark, args);
            var testResultXmlParser = new TestResultXmlParser();

            if (performanceBenchmark.BaselineResultFilesExist)
            {
                performanceBenchmark.AddBaselinePerformanceTestRunResults(testResultXmlParser, baselinePerformanceTestRunResults, baselineTestResults);

                if (baselinePerformanceTestRunResults.Any())
                {
                    aggregateTestRunResults.AddRange(baselinePerformanceTestRunResults);
                }
                else
                {
                    // Emit a diagnostic instead of exiting silently.
                    Console.Error.WriteLine("Failed to parse any baseline performance test run results.");
                    Environment.Exit(1);
                }
            }

            if (performanceBenchmark.ResultFilesExist)
            {
                performanceBenchmark.AddPerformanceTestRunResults(testResultXmlParser, performanceTestRunResults, testResults, baselineTestResults);

                if (performanceTestRunResults.Any())
                {
                    aggregateTestRunResults.AddRange(performanceTestRunResults);
                }
                else
                {
                    Console.Error.WriteLine("Failed to parse any performance test run results.");
                    Environment.Exit(1);
                }
            }

            // If we have a baseline, put it at the front of the results so the report
            // displays the baseline first.
            var orderedTestRunResults = new List <PerformanceTestRunResult>();

            if (aggregateTestRunResults.Any(a => a.IsBaseline))
            {
                orderedTestRunResults.Add(aggregateTestRunResults.First(a => a.IsBaseline));
            }

            var nonBaselineTestRunResults = aggregateTestRunResults.Where(a => !a.IsBaseline).ToList();

            nonBaselineTestRunResults.Sort((run1, run2) => string.Compare(run1.ResultName, run2.ResultName, StringComparison.Ordinal));
            orderedTestRunResults.AddRange(nonBaselineTestRunResults);

            // Materialize once instead of Array.Resize-ing per element (O(n^2) copying).
            var performanceTestResults = orderedTestRunResults.ToArray();

            performanceBenchmark.TestRunMetadataProcessor.PerformFinalMetadataUpdate(performanceBenchmark);

            var reportWriter = new ReportWriter(performanceBenchmark.TestRunMetadataProcessor);

            reportWriter.WriteReport(
                performanceTestResults,
                performanceBenchmark.SigFig,
                performanceBenchmark.ReportDirPath,
                performanceBenchmark.BaselineResultFilesExist);

            return(WriteFailedTestsAndMetricsToConsole(performanceTestResults, performanceBenchmark));
        }
Esempio n. 9
0
        /// <summary>
        /// Writes details of every failed test (metric aggregations regressed past the
        /// baseline threshold) to the console.
        /// </summary>
        /// <param name="performanceTestResults">All test run results included in the report.</param>
        /// <param name="performanceBenchmark">Benchmark configuration; FailOnBaseline controls the returned exit code.</param>
        /// <returns>1 when FailOnBaseline is enabled and at least one test failed; otherwise 0.</returns>
        private static int WriteFailedTestsAndMetricsToConsole(PerformanceTestRunResult[] performanceTestResults, PerformanceBenchmark performanceBenchmark)
        {
            var failedTestsExist = performanceTestResults.SelectMany(ptr => ptr.TestResults)
                                   .Any(tr => tr.State == (int)TestState.Failure);

            if (failedTestsExist)
            {
                // Fixed typo: "One ore more" -> "One or more".
                WriteLine("FAILURE: One or more performance test metric aggregations is out of threshold from the baseline value.");
                WriteLine("-------------------------------------");
                WriteLine(" Performance tests with failed metrics");
                WriteLine("-------------------------------------");
                foreach (var performanceTestRunResult in performanceTestResults)
                {
                    // Materialize once so the query is not enumerated by both Any() and the loop;
                    // iterating an empty list is a no-op, so no separate guard is needed.
                    var failedTests = performanceTestRunResult.TestResults.Where(tr => tr.State == (int)TestState.Failure).ToList();
                    foreach (var failedTest in failedTests)
                    {
                        ++indentLevel;
                        WriteLine("{0}", failedTest.TestName);

                        var regressedSgs = failedTest.SampleGroupResults.Where(sgr => sgr.Regressed);
                        foreach (var sampleGroupResult in regressedSgs)
                        {
                            WriteLine("----");
                            WriteLine("Metric        : {0}", sampleGroupResult.SampleGroupName);
                            WriteLine("Aggregation   : {0}", sampleGroupResult.AggregationType);
                            WriteLine("Failed Value  : {0,8:F2}", sampleGroupResult.AggregatedValue);
                            WriteLine("Baseline Value: {0,8:F2}", sampleGroupResult.BaselineValue);
                            WriteLine("Threshold %   : {0,8:F2}", sampleGroupResult.Threshold);
                            // NOTE(review): printed as a ratio, not multiplied by 100 despite the "%" label — confirm intended.
                            WriteLine("Actual Diff % : {0,8:F2}", Math.Abs(sampleGroupResult.AggregatedValue - sampleGroupResult.BaselineValue) / sampleGroupResult.BaselineValue);
                        }
                        --indentLevel;
                        WriteLine("\r\n");
                    }
                }
            }

            return(performanceBenchmark.FailOnBaseline && failedTestsExist ? 1 : 0);
        }
        /// <summary>
        /// Writes details of every passed test that has progressed metrics (metric
        /// aggregations improved past the baseline threshold) to the console.
        /// The header is printed once, before the first progressed test.
        /// </summary>
        /// <param name="performanceTestResults">All test run results included in the report.</param>
        /// <param name="performanceBenchmark">Benchmark configuration (currently unused here; kept for signature parity with the failure writer).</param>
        private static void WriteProgressedTestsAndMetricsToConsole(PerformanceTestRunResult[] performanceTestResults, PerformanceBenchmark performanceBenchmark)
        {
            bool loggedHeader     = false;
            var  passedTestsExist = performanceTestResults.SelectMany(ptr => ptr.TestResults)
                                    .Any(tr => tr.State == (int)TestState.Success);

            if (passedTestsExist)
            {
                foreach (var performanceTestRunResult in performanceTestResults)
                {
                    // Materialize once so the query is not enumerated by both Any() and the loop;
                    // iterating an empty list is a no-op, so no separate guard is needed.
                    var passedTests = performanceTestRunResult.TestResults.Where(tr => tr.State == (int)TestState.Success).ToList();
                    foreach (var tests in passedTests)
                    {
                        if (tests.SampleGroupResults.Any(sgr => sgr.Progressed))
                        {
                            if (!loggedHeader)
                            {
                                loggedHeader = true;
                                // Fixed typo: "One ore more" -> "One or more".
                                WriteLine("Info: One or more performance test metric aggregations is out of threshold from the baseline value.");
                                WriteLine("-------------------------------------");
                                WriteLine(" Performance tests with Progressed metrics");
                                WriteLine("-------------------------------------");
                            }

                            ++indentLevel;
                            WriteLine("{0}", tests.TestName);

                            var progressedSgs = tests.SampleGroupResults.Where(sgr => sgr.Progressed);
                            foreach (var sampleGroupResult in progressedSgs)
                            {
                                WriteLine("----");
                                WriteLine("Metric        : {0}", sampleGroupResult.SampleGroupName);
                                WriteLine("Aggregation   : {0}", sampleGroupResult.AggregationType);
                                // Fixed label padding so the value column lines up with the other rows.
                                WriteLine("New Value     : {0,8:F2}", sampleGroupResult.AggregatedValue);
                                WriteLine("Baseline Value: {0,8:F2}", sampleGroupResult.BaselineValue);
                                WriteLine("Threshold %   : {0,8:F2}", sampleGroupResult.Threshold);
                                // NOTE(review): printed as a ratio, not multiplied by 100 despite the "%" label — confirm intended.
                                WriteLine("Actual Diff % : {0,8:F2}", Math.Abs(sampleGroupResult.BaselineValue - sampleGroupResult.AggregatedValue) / sampleGroupResult.BaselineValue);
                            }
                            --indentLevel;
                            WriteLine("\r\n");
                        }
                    }
                }
            }
        }