Example 1
        public static int Run(CompareOptions options)
        {
            // Pass the configured file paths and test label to NUnit as test parameters.
            var parameters = CreateParameters(
                (ConfigurationConstants.ExpectedUtterancesPathKey, options.ExpectedUtterancesPath),
                (ConfigurationConstants.ActualUtterancesPathKey, options.ActualUtterancesPath),
                (ConfigurationConstants.TestLabelKey, options.TestLabel));

            var arguments = new List<string>
            {
                $"-p:{parameters}"
            };

            if (options.OutputFolder != null)
            {
                arguments.Add($"--work={options.OutputFolder}");
            }

            if (options.Metadata)
            {
                // Optionally compute the compare results up front and write the test metadata
                // and statistics files, either to the output folder or to the working directory.
                var expectedUtterances = Read<List<JsonLabeledUtterance>>(options.ExpectedUtterancesPath);
                var actualUtterances = Read<List<JsonLabeledUtterance>>(options.ActualUtterancesPath);
                var compareResults = TestCaseSource.GetNLUCompareResults(expectedUtterances, actualUtterances);
                var metadataPath = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestMetadataFileName) : TestMetadataFileName;
                var statisticsPath = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestStatisticsFileName) : TestStatisticsFileName;

                Write(metadataPath, compareResults.TestCases);
                File.WriteAllText(statisticsPath, JObject.FromObject(compareResults.Statistics).ToString());
                compareResults.PrintResults();
            }

            // Run the NUnit-based comparison tests in-process.
            new AutoRun(typeof(ConfigurationConstants).Assembly).Execute(arguments.ToArray());

            // We don't care if there are any failing NUnit tests.
            return 0;
        }
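
The Read and Write helpers used by both examples are not shown here. A minimal sketch of how they might look, assuming they are thin Newtonsoft.Json wrappers (the class name SerializationHelpers is hypothetical, and the real project may use custom serializer settings or streaming instead):

    using System.IO;
    using Newtonsoft.Json;

    internal static class SerializationHelpers
    {
        public static T Read<T>(string path)
        {
            // Deserialize a JSON file into the requested type (illustrative only).
            var json = File.ReadAllText(path);
            return JsonConvert.DeserializeObject<T>(json);
        }

        public static void Write<T>(string path, T value)
        {
            // Serialize the value as indented JSON and write it to disk (illustrative only).
            var json = JsonConvert.SerializeObject(value, Formatting.Indented);
            File.WriteAllText(path, json);
        }
    }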
Example 2
        private static int RunJson(CompareOptions options)
        {
            // Load the expected and actual utterances and compute the comparison results.
            var expectedUtterances = Read<List<JsonLabeledUtterance>>(options.ExpectedUtterancesPath);
            var actualUtterances = Read<List<LabeledUtterance>>(options.ActualUtterancesPath);
            var testSettings = new TestSettings(options.TestSettingsPath, options.UnitTestMode);
            var compareResults = TestCaseSource.GetNLUCompareResults(expectedUtterances, actualUtterances, testSettings);

            // Compare against a baseline statistics file, if one was supplied.
            var baseline = options.BaselinePath != null ? Read<NLUStatistics>(options.BaselinePath) : null;

            compareResults.PrintResults(baseline);

            // Write the test metadata and statistics, either to the output folder or to the working directory.
            var metadataPath = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestMetadataFileName) : TestMetadataFileName;
            var statisticsPath = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestStatisticsFileName) : TestStatisticsFileName;

            Write(metadataPath, compareResults.TestCases);
            File.WriteAllText(statisticsPath, JObject.FromObject(compareResults.Statistics).ToString());

            // Collect any configured thresholds that the current statistics fail to meet.
            var failedThresholds = testSettings.Thresholds
                .Where(t => !compareResults.Statistics.CheckThreshold(baseline, t))
                .ToList();

            if (failedThresholds.Count > 0)
            {
                var failedThresholdsInfo = string.Join(", ", failedThresholds.Select(t => t.GetDescription()));
                Logger.LogWarning($"Performance threshold not met for {failedThresholdsInfo}.");
            }

            return failedThresholds.Count;
        }
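
Unlike Run, which always returns zero, RunJson returns the number of failed thresholds. A minimal caller sketch, assuming that value is surfaced as the process exit code so a CI build fails whenever a performance threshold is missed (ParseCompareOptions is hypothetical and only stands in for whatever argument parsing the real program uses):

        public static int Main(string[] args)
        {
            // Non-zero exit code when any performance threshold is not met.
            var options = ParseCompareOptions(args); // hypothetical parser
            return RunJson(options);
        }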