Example #1
        private static void Compare(string baseResultsPath, string newResultsPath, string savePath)
        {
            if (baseResultsPath == string.Empty)
            {
                Console.WriteLine("Please generate a baseline benchmark results file," +
                                  "or provide a path for a baseline benchmark results file.");
                return;
            }
            if (newResultsPath == string.Empty)
            {
                Console.WriteLine("Please provide a path to a benchmark results file to compare against the baseline.");
                return;
            }

            // Create Model comparer
            Console.WriteLine("\nComparison of Model tests: \n");
            var baseModelPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), "DynamoPerformanceTests.DynamoModelPerformanceTestBase-report.csv");
            var newModelPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), "DynamoPerformanceTests.DynamoModelPerformanceTestBase-report.csv");
            var modelSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), "DynamoPerformanceTests.Comparison-Model.csv");
            var modelComparer = new ResultsComparer(baseModelPath, newModelPath, modelSavePath);

            // Create View comparer
            Console.WriteLine("\nComparison of View tests: \n");
            var baseViewPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), "DynamoPerformanceTests.DynamoViewPerformanceTestBase-report.csv");
            var newViewPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), "DynamoPerformanceTests.DynamoViewPerformanceTestBase-report.csv");
            var viewSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), "DynamoPerformanceTests.Comparison-View.csv");
            var viewComparer = new ResultsComparer(baseViewPath, newViewPath, viewSavePath);
        }
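A minimal call-site sketch for the method above, assuming the three paths come from command-line arguments (the argument handling here is hypothetical and not part of the original example):

        // Hypothetical entry point: args[0] = baseline results dir, args[1] = new results dir, args[2] = output dir.
        static void Main(string[] args)
        {
            var baseResultsPath = args.Length > 0 ? args[0] : string.Empty;
            var newResultsPath  = args.Length > 1 ? args[1] : string.Empty;
            var savePath        = args.Length > 2 ? args[2] : ".";

            Compare(baseResultsPath, newResultsPath, savePath);
        }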
Example #2
        private static void Compare(string baseResultsPath, string newResultsPath, string savePath)
        {
            if (baseResultsPath == string.Empty)
            {
                Console.WriteLine("Please generate a baseline benchmark results file," +
                                  "or provide a path for a baseline benchmark results file.");
                return;
            }
            if (newResultsPath == string.Empty)
            {
                Console.WriteLine("Please provide a path to a benchmark results file to compare against the baseline.");
                return;
            }

            ResultsComparer modelComparer = null;
            ResultsComparer viewComparer  = null;

            try
            {
                // Create Model comparer
                Console.WriteLine("\nComparison of Model tests: \n");
                var baseModelPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), modelTestBaseReport);
                var newModelPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), modelTestBaseReport);
                var modelSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), modelTestComparison);

                modelComparer = new ResultsComparer(baseModelPath, newModelPath, modelSavePath);

                // Create View comparer
                Console.WriteLine("\nComparison of View tests: \n");
                var baseViewPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), viewTestBaseReport);
                var newViewPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), viewTestBaseReport);
                var viewSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), viewTestComparison);

                viewComparer = new ResultsComparer(baseViewPath, newViewPath, viewSavePath);
            }
            // Catch here - most likely the view comparer could not be created because the view comparison did not run.
            catch (Exception e)
            {
                Console.WriteLine($"Exception while comparing results: {e.Message}{Environment.NewLine}{e}");
            }

            // Report the result of the comparison: exit with a failure code if any benchmark failed.
            if (modelComparer != null)
            {
                if (modelComparer.ComparisonData.Any(x => x.ResultState == ResultsComparer.ComparisonResultState.FAIL))
                {
                    Console.WriteLine("Comparison failed, some model performance benchmarks failed. Please see log above for details.");
                    Environment.Exit((int)ExitCode.ComparisonFailure);
                }
            }

            if (viewComparer != null)
            {
                if (viewComparer.ComparisonData.Any(x => x.ResultState == ResultsComparer.ComparisonResultState.FAIL))
                {
                    Console.WriteLine("Comparison failed, some view performance benchmarks failed. Please see log above for details.");
                    Environment.Exit((int)ExitCode.ComparisonFailure);
                }
            }
        }
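This variant references report-name constants and an ExitCode enum that are not shown in the snippet. A minimal sketch of those declarations, assuming the file names from Example #1; the exit-code value is an assumption:

        // Assumed declarations used by the snippet above; the file names are taken from Example #1,
        // the exit-code value is an assumption.
        private const string modelTestBaseReport = "DynamoPerformanceTests.DynamoModelPerformanceTestBase-report.csv";
        private const string viewTestBaseReport  = "DynamoPerformanceTests.DynamoViewPerformanceTestBase-report.csv";
        private const string modelTestComparison = "DynamoPerformanceTests.Comparison-Model.csv";
        private const string viewTestComparison  = "DynamoPerformanceTests.Comparison-View.csv";

        private enum ExitCode
        {
            Success = 0,
            ComparisonFailure = 1
        }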
Example #3
        private Dictionary<string, BenchmarkResult> ImportResultsCSV(string csvPath)
        {
            // Get csv
            csvPath = PerformanceTestHelper.GetFullPath(csvPath);
            TextFieldParser parser = new TextFieldParser(csvPath);

            parser.HasFieldsEnclosedInQuotes = true;
            parser.SetDelimiters(",");

            // Get csv header
            var header = parser.ReadFields();

            // Check that columns we care about exist
            var columnNames    = new string[] { "Method", "Graph", "Mean", "Error", "StdDev" };
            var missingColumns = columnNames.Where(c => !header.Contains(c));

            if (missingColumns.Any())
            {
                throw new Exception(string.Format("The csv file at {0} does not contain the following required columns: {1}.", csvPath, string.Join(", ", missingColumns)));
            }

            // Get indices for columns we care about
            var iMethod = Array.IndexOf(header, columnNames[0]);
            var iGraph  = Array.IndexOf(header, columnNames[1]);
            var iMean   = Array.IndexOf(header, columnNames[2]);
            var iError  = Array.IndexOf(header, columnNames[3]);
            var iStdDev = Array.IndexOf(header, columnNames[4]);

            // Parse results
            var benchmarkResults = new Dictionary<string, BenchmarkResult>();

            while (!parser.EndOfData)
            {
                // Get full benchmark data from csv
                var brLine = parser.ReadFields();

                // Get the values we care about
                var method      = brLine[iMethod];
                var graph       = brLine[iGraph];
                // Numeric columns carry a unit suffix (e.g. "1.23 ms"): strip everything but digits and separators before parsing.
                var mean        = Convert.ToDouble(Regex.Replace(brLine[iMean], "[^0-9.,]", ""), CultureInfo.InvariantCulture);
                var error       = Convert.ToDouble(Regex.Replace(brLine[iError], "[^0-9.,]", ""), CultureInfo.InvariantCulture);
                var stdDev      = Convert.ToDouble(Regex.Replace(brLine[iStdDev], "[^0-9.,]", ""), CultureInfo.InvariantCulture);
                var meanUnits   = new string(brLine[iMean].ToCharArray().Where(c => Char.IsLetter(c)).ToArray());
                var errorUnits  = new string(brLine[iError].ToCharArray().Where(c => Char.IsLetter(c)).ToArray());
                var stdDevUnits = new string(brLine[iStdDev].ToCharArray().Where(c => Char.IsLetter(c)).ToArray());

                // Store the benchmark based on its 'Method' and 'Graph' values
                benchmarkResults[method + graph] = new BenchmarkResult(method, graph, mean, meanUnits, error, errorUnits, stdDev, stdDevUnits);
            }
            return benchmarkResults;
        }
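ImportResultsCSV stores one BenchmarkResult per combined 'Method' + 'Graph' key. A minimal sketch of a BenchmarkResult type that matches the constructor call above (the property names are assumptions):

        // Hypothetical shape of BenchmarkResult, inferred from the constructor call above.
        public class BenchmarkResult
        {
            public string Method { get; }
            public string Graph { get; }
            public double Mean { get; }
            public string MeanUnits { get; }
            public double Error { get; }
            public string ErrorUnits { get; }
            public double StdDev { get; }
            public string StdDevUnits { get; }

            public BenchmarkResult(string method, string graph,
                                   double mean, string meanUnits,
                                   double error, string errorUnits,
                                   double stdDev, string stdDevUnits)
            {
                Method = method;
                Graph = graph;
                Mean = mean;
                MeanUnits = meanUnits;
                Error = error;
                ErrorUnits = errorUnits;
                StdDev = stdDev;
                StdDevUnits = stdDevUnits;
            }
        }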