Example 1
        public static int Run(CompareOptions options)
        {
            var parameters = CreateParameters(
                (ConfigurationConstants.ExpectedUtterancesPathKey, options.ExpectedUtterancesPath),
                (ConfigurationConstants.ActualUtterancesPathKey, options.ActualUtterancesPath),
                (ConfigurationConstants.TestLabelKey, options.TestLabel));

            var arguments = new List<string>
            {
                $"-p:{parameters}"
            };

            if (options.OutputFolder != null)
            {
                arguments.Add($"--work={options.OutputFolder}");
            }

            if (options.Metadata)
            {
                var expectedUtterances = Read<List<JsonLabeledUtterance>>(options.ExpectedUtterancesPath);
                var actualUtterances   = Read<List<JsonLabeledUtterance>>(options.ActualUtterancesPath);
                var compareResults     = TestCaseSource.GetNLUCompareResults(expectedUtterances, actualUtterances);
                var metadataPath       = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestMetadataFileName) : TestMetadataFileName;
                var statisticsPath     = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestStatisticsFileName) : TestStatisticsFileName;

                Write(metadataPath, compareResults.TestCases);
                File.WriteAllText(statisticsPath, JObject.FromObject(compareResults.Statistics).ToString());
                compareResults.PrintResults();
            }

            new AutoRun(typeof(ConfigurationConstants).Assembly).Execute(arguments.ToArray());

            // We don't care if there are any failing NUnit tests
            return 0;
        }
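
The CreateParameters helper called at the top of Run is not shown in this example. A minimal sketch of what it might look like, assuming it simply joins the non-null key/value pairs into NUnit's key=value;key=value parameter syntax (requires System.Linq); the exact implementation in the original codebase may differ:

        private static string CreateParameters(params (string Key, string Value)[] pairs)
        {
            // Assumption: drop pairs with null values and join the rest in the
            // "key=value;key=value" form accepted by NUnit's -p/--params option.
            var formattedPairs = pairs
                .Where(pair => pair.Value != null)
                .Select(pair => $"{pair.Key}={pair.Value}");
            return string.Join(";", formattedPairs);
        }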
Example 2
        private static int RunJson(CompareOptions options)
        {
            var expectedUtterances = Read<List<JsonLabeledUtterance>>(options.ExpectedUtterancesPath);
            var actualUtterances   = Read<List<LabeledUtterance>>(options.ActualUtterancesPath);
            var testSettings       = new TestSettings(options.TestSettingsPath, options.UnitTestMode);
            var compareResults     = TestCaseSource.GetNLUCompareResults(expectedUtterances, actualUtterances, testSettings);

            var baseline = options.BaselinePath != null ? Read<NLUStatistics>(options.BaselinePath) : null;

            compareResults.PrintResults(baseline);

            var metadataPath   = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestMetadataFileName) : TestMetadataFileName;
            var statisticsPath = options.OutputFolder != null ? Path.Combine(options.OutputFolder, TestStatisticsFileName) : TestStatisticsFileName;

            Write(metadataPath, compareResults.TestCases);
            File.WriteAllText(statisticsPath, JObject.FromObject(compareResults.Statistics).ToString());

            var failedThresholds = testSettings.Thresholds
                                   .Where(t => !compareResults.Statistics.CheckThreshold(baseline, t))
                                   .ToList();

            if (failedThresholds.Count > 0)
            {
                var failedThresholdsInfo = string.Join(", ", failedThresholds.Select(t => t.GetDescription()));
                Logger.LogWarning($"Performance threshold not met for {failedThresholdsInfo}.");
            }

            return failedThresholds.Count;
        }
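
The Read and Write helpers used in Examples 1 and 2 are also defined elsewhere in that codebase. A minimal sketch of plausible implementations, assuming plain Json.NET file (de)serialization (requires Newtonsoft.Json, which the examples already reference via JObject):

        private static T Read<T>(string path)
        {
            // Assumption: the file contains JSON that maps directly onto T.
            return JsonConvert.DeserializeObject<T>(File.ReadAllText(path));
        }

        private static void Write(string path, object value)
        {
            // Assumption: results are persisted as indented JSON.
            File.WriteAllText(path, JsonConvert.SerializeObject(value, Formatting.Indented));
        }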
Example 3

        public TestCaseStreamReadResult Read(TestCaseSource<string> testCaseSource, TestOptions testOptions, TestContext testContext, ITestMethodRunnerCallback callback)
        {
            if (testCaseSource == null)
            {
                throw new ArgumentNullException(nameof(testCaseSource));
            }
            if (testOptions == null)
            {
                throw new ArgumentNullException(nameof(testOptions));
            }
            if (testContext == null)
            {
                throw new ArgumentNullException(nameof(testContext));
            }

            var testCaseStreamReadResult = new TestCaseStreamReadResult();

            var codeCoverageEnabled = testOptions.CoverageOptions.ShouldRunCoverage(testContext.TestFileSettings.CodeCoverageExecutionMode);

            var streamingTestFileContexts = testContext.ReferencedFiles
                                            .Where(x => x.IsFileUnderTest)
                                            .Select(x => new StreamingTestFileContext(x, testContext, codeCoverageEnabled))
                                            .ToList();

            var deferredEvents = new List<Action<StreamingTestFileContext>>();

            if (streamingTestFileContexts.Count == 1)
            {
                currentTestFileContext = streamingTestFileContexts.First();
            }

            testCaseSource.Subscribe((line) => ProcessLine(line, testContext, streamingTestFileContexts, deferredEvents, callback, testOptions.DebugEnabled));

            var readerTask = testCaseSource.Open();

            // Poll until the reader task completes, for as long as the test case source is still alive
            while ((readerTask.Status == TaskStatus.WaitingToRun ||
                    readerTask.Status == TaskStatus.WaitingForActivation ||
                    readerTask.Status == TaskStatus.Running) && testCaseSource.IsAlive)
            {
                Thread.Sleep(100);
            }

            if (readerTask.IsCompleted)
            {
                ChutzpahTracer.TraceInformation("Finished reading stream from test file '{0}'", testContext.FirstInputTestFile);
                testCaseStreamReadResult.TestFileSummaries = streamingTestFileContexts.Select(x => x.TestFileSummary).ToList();
            }
            else
            {
                // Since we timed out, make sure we play the deferred events so we do not lose errors.
                // We just attach them to the first test context at this point, since we do not know
                // where they belong.
                PlayDeferredEvents(streamingTestFileContexts.FirstOrDefault(), deferredEvents);

                // We timed out, so kill the process and return an empty test file summary
                ChutzpahTracer.TraceError("Test file '{0}' timed out after running for {1} milliseconds", testContext.FirstInputTestFile, (DateTime.Now - testCaseSource.LastTestEvent).TotalMilliseconds);

                testCaseSource.Dispose();
                testCaseStreamReadResult.TimedOut          = true;
                testCaseStreamReadResult.TestFileSummaries = testContext.ReferencedFiles.Where(x => x.IsFileUnderTest).Select(file => new TestFileSummary(file.Path)).ToList();
            }

            return testCaseStreamReadResult;
        }
Example 4
        public void TransformWorks(TestCaseSource source)
        {
            string result = Transformer.Transform(source.Content, source.Properties);

            Assert.That(result, Is.EqualTo(source.ReplacedContent), $"Transformed content {result} does not match expected {source.ReplacedContent}");
        }