Example #1
        /// <summary>
        /// Compares a baseline benchmark results directory against a new results directory
        /// and writes Model and View comparison CSVs to the save path.
        /// </summary>
        private static void Compare(string baseResultsPath, string newResultsPath, string savePath)
        {
            if (baseResultsPath == string.Empty)
            {
                Console.WriteLine("Please generate a baseline benchmark results file," +
                                  "or provide a path for a baseline benchmark results file.");
                return;
            }
            if (newResultsPath == string.Empty)
            {
                Console.WriteLine("Please provide a path to a benchmark results file to compare against the baseline.");
                return;
            }

            // Create Model comparer
            Console.WriteLine("\nComparison of Model tests: \n");
            var baseModelPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), "DynamoPerformanceTests.DynamoModelPerformanceTestBase-report.csv");
            var newModelPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), "DynamoPerformanceTests.DynamoModelPerformanceTestBase-report.csv");
            var modelSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), "DynamoPerformanceTests.Comparison-Model.csv");
            var modelComparer = new ResultsComparer(baseModelPath, newModelPath, modelSavePath);

            // Create View comparer
            Console.WriteLine("\nComparison of View tests: \n");
            var baseViewPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), "DynamoPerformanceTests.DynamoViewPerformanceTestBase-report.csv");
            var newViewPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), "DynamoPerformanceTests.DynamoViewPerformanceTestBase-report.csv");
            var viewSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), "DynamoPerformanceTests.Comparison-View.csv");
            var viewComparer = new ResultsComparer(baseViewPath, newViewPath, viewSavePath);
        }
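The Compare method above relies on PerformanceTestHelper.GetFullPath to resolve the directory arguments before a report file name is appended. The helper itself is not part of this listing; a minimal sketch of what such a method might do, assuming it only normalizes a possibly relative path (the class name below is hypothetical and the real helper may do more, e.g. resolve against the executable's directory):

    // Sketch only: a possible shape for the path helper used above. The class name is
    // hypothetical and the real PerformanceTestHelper.GetFullPath may behave differently.
    using System.IO;

    public static class PerformanceTestHelperSketch
    {
        // Resolves a relative or absolute directory path to an absolute path,
        // using the current working directory as the base for relative input.
        public static string GetFullPath(string path)
        {
            return Path.GetFullPath(path);
        }
    }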
Example #2
        /// <summary>
        /// Compares a baseline benchmark results directory against a new results directory,
        /// writes Model and View comparison CSVs to the save path, and exits with a failure
        /// code if any benchmark comparison fails.
        /// </summary>
        private static void Compare(string baseResultsPath, string newResultsPath, string savePath)
        {
            if (baseResultsPath == string.Empty)
            {
                Console.WriteLine("Please generate a baseline benchmark results file," +
                                  "or provide a path for a baseline benchmark results file.");
                return;
            }
            if (newResultsPath == string.Empty)
            {
                Console.WriteLine("Please provide a path to a benchmark results file to compare against the baseline.");
                return;
            }

            ResultsComparer modelComparer = null;
            ResultsComparer viewComparer  = null;

            try
            {
                // Create Model comparer
                Console.WriteLine("\nComparison of Model tests: \n");
                var baseModelPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), modelTestBaseReport);
                var newModelPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), modelTestBaseReport);
                var modelSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), modelTestComparison);

                modelComparer = new ResultsComparer(baseModelPath, newModelPath, modelSavePath);

                // Create View comparer
                Console.WriteLine("\nComparison of View tests: \n");
                var baseViewPath = Path.Combine(PerformanceTestHelper.GetFullPath(baseResultsPath), viewTestBaseReport);
                var newViewPath  = Path.Combine(PerformanceTestHelper.GetFullPath(newResultsPath), viewTestBaseReport);
                var viewSavePath = Path.Combine(PerformanceTestHelper.GetFullPath(savePath), viewTestComparison);

                viewComparer = new ResultsComparer(baseViewPath, newViewPath, viewSavePath);
            }
            // Most likely the view comparer could not be created because the view comparison did not run.
            catch (Exception e)
            {
                Console.WriteLine($"Exception while comparing results: {e}");
            }

            // Report the comparison outcome; exit with a failure code if any benchmark comparison failed.
            if (modelComparer != null)
            {
                if (modelComparer.ComparisonData.Any(x => x.ResultState == ResultsComparer.ComparisonResultState.FAIL))
                {
                    Console.WriteLine("Comparison failed, some model performance benchmarks failed. Please see log above for details.");
                    Environment.Exit((int)ExitCode.ComparisonFailure);
                }
            }

            if (viewComparer != null)
            {
                if (viewComparer.ComparisonData.Any(x => x.ResultState == ResultsComparer.ComparisonResultState.FAIL))
                {
                    Console.WriteLine("Comparison failed, some view performance benchmarks failed. Please see log above for details.");
                    Environment.Exit((int)ExitCode.ComparisonFailure);
                }
            }
        }
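This second version replaces the hard-coded report file names with fields and exits through an ExitCode enum, neither of which appears in the listing. A minimal sketch of those declarations, assuming the file names match the paths hard-coded in Example #1 (the actual names and values in the project may differ):

        // Sketch only: declarations assumed by the Compare method above. The file names are
        // copied from Example #1; the numeric value of ComparisonFailure is an assumption.
        private const string modelTestBaseReport = "DynamoPerformanceTests.DynamoModelPerformanceTestBase-report.csv";
        private const string viewTestBaseReport  = "DynamoPerformanceTests.DynamoViewPerformanceTestBase-report.csv";
        private const string modelTestComparison = "DynamoPerformanceTests.Comparison-Model.csv";
        private const string viewTestComparison  = "DynamoPerformanceTests.Comparison-View.csv";

        private enum ExitCode
        {
            Success = 0,
            ComparisonFailure = 1
        }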
Example #3
        /// <summary>
        /// Imports a BenchmarkDotNet results CSV and returns the benchmarks keyed by
        /// their 'Method' and 'Graph' values.
        /// </summary>
        private Dictionary<string, BenchmarkResult> ImportResultsCSV(string csvPath)
        {
            // Get csv
            csvPath = PerformanceTestHelper.GetFullPath(csvPath);
            TextFieldParser parser = new TextFieldParser(csvPath);

            parser.HasFieldsEnclosedInQuotes = true;
            parser.SetDelimiters(",");

            // Get csv header
            var header = parser.ReadFields();

            // Check that columns we care about exist
            var columnNames    = new string[] { "Method", "Graph", "Mean", "Error", "StdDev" };
            var missingColumns = columnNames.Where(c => !header.Contains(c));

            if (missingColumns.Any())
            {
                throw new Exception(string.Format("The csv file at {0} does not contain the following required columns: {1}.", csvPath, string.Join(", ", missingColumns)));
            }

            // Get indices for columns we care about
            var iMethod = Array.IndexOf(header, columnNames[0]);
            var iGraph  = Array.IndexOf(header, columnNames[1]);
            var iMean   = Array.IndexOf(header, columnNames[2]);
            var iError  = Array.IndexOf(header, columnNames[3]);
            var iStdDev = Array.IndexOf(header, columnNames[4]);

            // Parse results
            var benchmarkResults = new Dictionary<string, BenchmarkResult>();

            while (!parser.EndOfData)
            {
                // Get full benchmark data from csv
                var brLine = parser.ReadFields();

                // Get the values we care about
                var method      = brLine[iMethod];
                var graph       = brLine[iGraph];
                // Strip everything except digits, decimal points and thousands separators before parsing.
                var mean        = Convert.ToDouble(Regex.Replace(brLine[iMean], "[^0-9.,]", ""), CultureInfo.InvariantCulture);
                var error       = Convert.ToDouble(Regex.Replace(brLine[iError], "[^0-9.,]", ""), CultureInfo.InvariantCulture);
                var stdDev      = Convert.ToDouble(Regex.Replace(brLine[iStdDev], "[^0-9.,]", ""), CultureInfo.InvariantCulture);
                var meanUnits   = new string(brLine[iMean].ToCharArray().Where(c => Char.IsLetter(c)).ToArray());
                var errorUnits  = new string(brLine[iError].ToCharArray().Where(c => Char.IsLetter(c)).ToArray());
                var stdDevUnits = new string(brLine[iStdDev].ToCharArray().Where(c => Char.IsLetter(c)).ToArray());

                // Store the benchmark based on its 'Method' and 'Graph' values
                benchmarkResults[method + graph] = new BenchmarkResult(method, graph, mean, meanUnits, error, errorUnits, stdDev, stdDevUnits);
            }
            return benchmarkResults;
        }
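Each row is stored as a BenchmarkResult keyed by its 'Method' and 'Graph' values, but the class itself is not shown. A minimal sketch that is consistent with the constructor call in the loop above (property names are illustrative; the real type in DynamoPerformanceTests may differ):

        // Sketch only: a BenchmarkResult shape matching the constructor call above.
        public class BenchmarkResult
        {
            public string Method { get; }
            public string Graph { get; }
            public double Mean { get; }
            public string MeanUnits { get; }
            public double Error { get; }
            public string ErrorUnits { get; }
            public double StdDev { get; }
            public string StdDevUnits { get; }

            public BenchmarkResult(string method, string graph,
                double mean, string meanUnits,
                double error, string errorUnits,
                double stdDev, string stdDevUnits)
            {
                Method      = method;
                Graph       = graph;
                Mean        = mean;
                MeanUnits   = meanUnits;
                Error       = error;
                ErrorUnits  = errorUnits;
                StdDev      = stdDev;
                StdDevUnits = stdDevUnits;
            }
        }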
Example #4
        public static void Main(string[] args)
        {
            var showHelp = false;

            if (args.Length <= 0)
            {
                Console.WriteLine("Please specify a command.");
                return;
            }

            // Default arguments
            IConfig config             = PerformanceTestHelper.getFastReleaseConfig();
            var     testDirectory      = "../../../graphs/";
            var     baseResultsPath    = string.Empty;
            var     newResultsPath     = "BenchmarkDotNet.Artifacts/results/";
            var     saveComparisonPath = string.Empty;

            // Command line options
            var opts = new OptionSet()
            {
                { "g=|graphs=", "Path to Directory containing test graphs. Defaults to 'Dynamo/tools/Performance/DynamoPerformanceTests/graphs/'", v => { testDirectory = v; } },
                { "b=|base=", "Path to Directory containing performance results files to use as comparison base. Defaults to 'BenchmarkDotNet.Artifacts/results/'", v => { baseResultsPath = v; } },
                { "n=|new=", "Path to Directory containing new performance results files to compare against the baseline", v => { newResultsPath = v; } },
                { "s=|save=", "Location to save comparison csv", v => { saveComparisonPath = v; } },
                { "h|help", "show this message and return", v => showHelp = v != null },
            };

            opts.Parse(args);

            // Show help
            if (showHelp)
            {
                ShowHelp(opts);
                return;
            }

            // Get command
            Command command;
            var     commandRecognized = Enum.TryParse(args[0], out command);

            if (!commandRecognized)
            {
                Console.WriteLine("Command \"{0}\" not recognized.", args[0]);
                return;
            }

            // Execute command
            switch (command)
            {
            case Command.NonOptimizedBenchmark:
                config = PerformanceTestHelper.getDebugConfig();
                goto case Command.Benchmark;

            case Command.DebugInProcess:
                config = PerformanceTestHelper.getDebugInProcessConfig();
                goto case Command.Benchmark;

            case Command.Benchmark:
                DynamoViewPerformanceTestBase.testDirectory = testDirectory;
                var runSummaryWithUI = BenchmarkRunner.Run<DynamoViewPerformanceTestBase>(config);

                goto case Command.ModelOnlyBenchmark;

            case Command.ModelOnlyBenchmark:
                DynamoModelPerformanceTestBase.testDirectory = testDirectory;
                var runSummaryWithoutUI = BenchmarkRunner.Run<DynamoModelPerformanceTestBase>(config);
                break;

            case Command.StandardConfigModelOnlyBenchmark:
                config = PerformanceTestHelper.getReleaseConfig();
                goto case Command.ModelOnlyBenchmark;

            case Command.Compare:
                Compare(baseResultsPath, newResultsPath, saveComparisonPath);
                break;
            }
        }
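The switch above handles six commands, but the Command enum is not included in the listing. A sketch whose members match the case labels (member order and underlying values are assumptions), followed by a couple of illustrative invocations built from the OptionSet flags:

        // Sketch only: an enum whose members match the cases handled above.
        public enum Command
        {
            Benchmark,
            NonOptimizedBenchmark,
            DebugInProcess,
            ModelOnlyBenchmark,
            StandardConfigModelOnlyBenchmark,
            Compare
        }

        // Illustrative invocations (the executable name is taken from the comment in Example #6):
        //   DynamoPerformanceTests.exe Benchmark -g="C:\path\to\graphs\"
        //   DynamoPerformanceTests.exe Compare -b="C:\baseline\results\" -n="BenchmarkDotNet.Artifacts/results/" -s="C:\output\"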
Example #5
        /// <summary>
        /// Override this function to preload dlls into the Dynamo library.
        /// </summary>
        /// <param name="libraries">extra dlls to load</param>
        protected override void GetLibrariesToPreload(List<string> libraries)
        {
            libraries.AddRange(PerformanceTestHelper.getDynamoDefaultLibs());
            base.GetLibrariesToPreload(libraries);
        }
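PerformanceTestHelper.getDynamoDefaultLibs is not shown in this listing; a minimal sketch of one possible implementation, with placeholder assembly names that are illustrative rather than taken from the source:

    // Sketch only: a helper returning the default libraries to preload. The class name is
    // hypothetical and the assembly names below are placeholders, not the project's actual list.
    using System.Collections.Generic;

    public static class PerformanceTestHelperLibsSketch
    {
        public static List<string> getDynamoDefaultLibs()
        {
            return new List<string>
            {
                "DSCoreNodes.dll",  // placeholder
                "VMDataBridge.dll"  // placeholder
            };
        }
    }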
Example #6
        /// <summary>
        /// This is the entry point of the Dynamo Performance console app.
        /// </summary>
        /// <param name="args"></param>
        public static void Main(string[] args)
        {
            DirectoryInfo dir;

            // Running with an input dir location:
            // DynamoPerformanceTests.exe "C:\directory path\"
            //
            if (args.Length <= 0)
            {
                Console.WriteLine("Supply a path to a test directory containing DYN files");
            }
            else
            {
                try
                {
                    dir = new DirectoryInfo(args[0]);

                    // Use helper to get debug config in order to run benchmarks on debug build of DynamoCore
                    // PerformanceTestHelper.getDebugConfig();

                    // Use helper to get debug in process config in order to debug benchmarks
                    // PerformanceTestHelper.getDebugInProcessConfig();

                    DynamoViewPerformanceTestBase.testDirectory = dir.FullName;
                    var runSummaryWithUI = BenchmarkRunner.Run<DynamoViewPerformanceTestBase>(PerformanceTestHelper.getFastReleaseConfig());

                    DynamoModelPerformanceTestBase.testDirectory = dir.FullName;
                    var runSummaryWithoutUI = BenchmarkRunner.Run<DynamoModelPerformanceTestBase>(PerformanceTestHelper.getFastReleaseConfig());
                }
                catch (Exception e)
                {
                    Console.WriteLine("Not a valid path: " + e.Message);
                }
            }
        }
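As the comments above note, the two debug helpers can be swapped in for the fast release configuration when the benchmarks need to run against a debug build of DynamoCore or be stepped through in-process. For example (a sketch using only the helper and type names already shown above):

                    // Run the model benchmarks against a debug build of DynamoCore:
                    var debugSummary = BenchmarkRunner.Run<DynamoModelPerformanceTestBase>(PerformanceTestHelper.getDebugConfig());

                    // Or run them in-process so the benchmark code itself can be debugged:
                    var inProcessSummary = BenchmarkRunner.Run<DynamoModelPerformanceTestBase>(PerformanceTestHelper.getDebugInProcessConfig());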