public void ConvertsITestPassed()
{
    // A TestPassed message pushed through ResultVisitor should be translated into
    // a TestResult on the listener: fixture type, method, display name, Passed
    // state, execution time (ms) and a total test count of one.
    TestResult capturedResult = null;
    var listener = Substitute.For <ITestListener>();
    listener.WhenAny(l => l.TestFinished(null))
            .Do <TestResult>(r => capturedResult = r);
    var visitor = new ResultVisitor(listener);
    var passedMessage = new TestPassed
    {
        TestCase = new TestCase(typeof(string), "Contains"),
        TestDisplayName = "Display Name",
        ExecutionTime = 123.45M
    };

    visitor.OnMessage(passedMessage);

    Assert.NotNull(capturedResult);
    Assert.Same(typeof(string), capturedResult.FixtureType);
    Assert.Equal("Contains", capturedResult.Method.Name);
    Assert.Equal("Display Name", capturedResult.Name);
    Assert.Equal(TestState.Passed, capturedResult.State);
    Assert.Equal(123.45, capturedResult.TimeSpan.TotalMilliseconds);
    Assert.Equal(1, capturedResult.TotalTests);
}
public bool QueueMessage(IMessageSinkMessage message)
{
    // When an invalid configuration is expected, invert results before forwarding
    // to the inner sink: an anticipated failure (whose first exception type is on
    // the allow-list) becomes a pass, and an unexpected pass becomes a failure.
    if (this.invalidConfigurationExpected)
    {
        // TODO: allow for derived types of allowed exceptions
        if (message is TestFailed failed)
        {
            var isAllowedFailure = failed.ExceptionTypes.Length > 0
                && AllowedFailureExceptionTypes.Any(t => t.FullName == failed.ExceptionTypes[0]);
            if (isAllowedFailure)
            {
                message = new TestPassed(failed.Test, failed.ExecutionTime, failed.Output);
                this.InvertedFailures++;
            }
        }
        else if (message is TestPassed passed)
        {
            var unexpectedSuccess = new AssertActualExpectedException(false, true, "Expected invalid configuration but no exception thrown.");
            message = new TestFailed(passed.Test, passed.ExecutionTime, passed.Output, unexpectedSuccess);
            this.InvertedSuccesses++;
        }
    }

    return this.inner.QueueMessage(message);
}
private bool CheckIfTestFailedAndFireCallbacks(LightTestFixture fixture, Script script)
{
    // Fires exactly one callback based on whether the script is a special script
    // and whether the test is flagged to fail, then resets the flag.
    // Returns the failure flag as it was when this method was entered.
    var failed = TestData.ShouldFailTest;
    var isSpecial = LightTestFixture.IsSpecialScript(script);

    if (isSpecial)
    {
        if (failed)
        {
            FixtureSpecialScripFailed?.Invoke(fixture, script);
        }
        else
        {
            FixtureSpecialScriptSucceded?.Invoke(fixture, script);
        }
    }
    else if (failed)
    {
        TestFailed?.Invoke(fixture, script);
    }
    else
    {
        TestPassed?.Invoke(fixture, script);
    }

    // One-shot flag: consume it so the next script starts clean.
    TestData.ShouldFailTest = false;
    return failed;
}
public AcceptanceTestRunner(IList <Type> tests, TestPassed passHandler, TestFailed failHandler, TestSkipped skipHandler)
{
    // Creates a runner over the given acceptance tests, wiring the callbacks that
    // fire when a test passes, fails, or is skipped.
    //
    // Fix: ArgumentNullException's single-string constructor takes the PARAMETER
    // NAME, not a message, so the original calls produced misleading exception
    // text ("Test result handlers cannot be null." reported as the param name).
    // Throw per parameter with nameof(...) and keep the prose as the message,
    // which also tells the caller exactly which argument was null.
    if (passHandler == null)
    {
        throw new ArgumentNullException(nameof(passHandler), "Test result handlers cannot be null.");
    }
    if (failHandler == null)
    {
        throw new ArgumentNullException(nameof(failHandler), "Test result handlers cannot be null.");
    }
    if (skipHandler == null)
    {
        throw new ArgumentNullException(nameof(skipHandler), "Test result handlers cannot be null.");
    }

    _tests = tests ?? throw new ArgumentNullException(nameof(tests), "Test collection cannot be null.");
    _testPassedHandler = passHandler;
    _testFailedHandler = failHandler;
    _testSkippedHandler = skipHandler;
}
public bool QueueMessage(IMessageSinkMessage message)
{
    // Rewrites every passed-test message so its output carries the execution time
    // (converted from seconds to milliseconds), then forwards to the inner sink.
    if (message is ITestPassed passed)
    {
        var timingText = string.Format(
            CultureInfo.InvariantCulture,
            "Execution time: {0}ms",
            passed.ExecutionTime * 1000);
        message = new TestPassed(passed.Test, passed.ExecutionTime, timingText);
    }

    return this.inner.QueueMessage(message);
}
protected override async Task <RunSummary> RunTestAsync()
{
    // Runs a scenario-based fact, which may surface as a theory with arguments
    // discovered at runtime: the scenario callback reports test-case descriptors,
    // and each newly discovered theory argument triggers a restart of the whole
    // test so every case gets its own run. Messages are buffered per iteration,
    // rewritten (test name, aggregate timing/output), and forwarded to the real bus.
    var scenarioFactTestCase = (ScenarioFactTestCase)TestCase;
    var test = CreateTest(TestCase, DisplayName);
    var aggregatedResult = new RunSummary();
    // Theories are called with required arguments. Keep track of what arguments we already tested so that we can skip those accordingly
    var testedArguments = new HashSet <object>();
    // Each time we find a new theory argument, we will want to restart our Test so that we can collect subsequent test cases
    bool pendingRestart;
    do
    {
        // Safeguarding against abuse: cap the number of theory iterations.
        if (testedArguments.Count >= scenarioFactTestCase.TheoryTestCaseLimit)
        {
            pendingRestart = false;
            MessageBus.QueueMessage(new TestSkipped(test, "Theory tests are capped to prevent infinite loops. You can configure a different limit by setting TheoryTestCaseLimit on the Scenario attribute"));
            aggregatedResult.Aggregate(new RunSummary { Skipped = 1, Total = 1 });
        }
        else
        {
            // Buffer everything so messages can be rewritten before forwarding.
            var bufferedMessageBus = new BufferedMessageBus(MessageBus);
            var stopwatch = Stopwatch.StartNew();
            var skipAdditionalTests = false;
            var testRecorded = false;
            pendingRestart = false; // By default we dont expect a new restart
            object? capturedArgument = null;
            ScenarioContext scenarioContext = null;
            scenarioContext = new ScenarioContext(scenarioFactTestCase.FactName, async(ScenarioTestCaseDescriptor descriptor) =>
            {
                // If we're hitting our target test
                if (descriptor.Name == scenarioFactTestCase.FactName)
                {
                    testRecorded = true;
                    if (skipAdditionalTests)
                    {
                        pendingRestart = true; // when we discovered more tests after a test completed, allow us to restart
                        scenarioContext.EndScenarioConditionally();
                        return;
                    }
                    if (descriptor.Argument is not null)
                    {
                        // If we've already received this test case, don't run it again
                        if (testedArguments.Contains(descriptor.Argument)) { return; }
                        testedArguments.Add(descriptor.Argument);
                        capturedArgument = descriptor.Argument;
                    }
                    // At this stage we found our first valid test case, any subsequent test case should issue a restart instead
                    skipAdditionalTests = true;
                    try
                    {
                        await descriptor.Invocation();
                    }
                    catch (Exception)
                    {
                        // If we caught an exception but we're in a theory, we will want to try for additional test cases
                        if (descriptor.Argument is not null) { pendingRestart = true; }
                        throw;
                    }
                    finally
                    {
                        scenarioContext.IsTargetConclusive = true;
                    }
                }
                else
                {
                    // We may be hitting a shared fact, those need to be invoked as well but not recorded as our primary target
                    if (!scenarioFactTestCase.RunInIsolation || descriptor.Flags.HasFlag(ScenarioTestCaseFlags.Shared))
                    {
                        await descriptor.Invocation();
                    }
                }
            });
            scenarioContext.AutoAbort = scenarioFactTestCase.ExecutionPolicy is ScenarioTestExecutionPolicy.EndAfterConclusion;
            // The scenario context is the single argument handed to the test method.
            TestMethodArguments = new object[] { scenarioContext };
            RunSummary result;
            result = await CreateTestRunner(test, bufferedMessageBus, TestClass, ConstructorArguments, TestMethod, TestMethodArguments, SkipReason, BeforeAfterAttributes, Aggregator, CancellationTokenSource).RunAsync();
            aggregatedResult.Aggregate(result);
            stopwatch.Stop();
            // For theory cases, suffix the display name with the captured argument.
            var testInvocationTest = capturedArgument switch
            {
                null => CreateTest(TestCase, DisplayName),
                not null => CreateTest(TestCase, $"{DisplayName} ({capturedArgument})")
            };
            var bufferedMessages = bufferedMessageBus.QueuedMessages;
            // We should have expected at least one test run. We probably returned before our target test was able to run
            if (!testRecorded && result.Failed == 0)
            {
                bufferedMessageBus.QueueMessage(new TestSkipped(test, scenarioContext.SkippedReason ?? "No applicable tests were able to run"));
                result = new RunSummary { Skipped = 1, Total = 1 };
            }
            // If we skipped this test, make sure that this is reported accordingly
            if (scenarioContext.Skipped && !bufferedMessages.OfType <TestSkipped>().Any())
            {
                bufferedMessages = bufferedMessages.Concat(new[] { new TestSkipped(testInvocationTest, scenarioContext.SkippedReason) });
            }
            // If we have indeed skipped this test, make sure that we're not reporting it as passed or failed
            if (bufferedMessages.OfType <TestSkipped>().Any())
            {
                bufferedMessages = bufferedMessages.Where(x => x is not TestPassed and not TestFailed);
            }
            // If we have a failure in post conditions, don't mark this test case as passed
            if (bufferedMessages.OfType <TestFailed>().Any())
            {
                bufferedMessages = bufferedMessages.Where(x => x is not TestPassed);
            }
            // Aggregate output/duration are stamped onto every rewritten message below.
            var output = string.Join("", bufferedMessages .OfType <ITestOutput>() .Select(x => x.Output));
            var duration = (decimal)stopwatch.Elapsed.TotalSeconds;
            foreach (var queuedMessage in bufferedMessages)
            {
                // Re-target each buffered message at the (possibly renamed) test.
                var transformedMessage = queuedMessage switch
                {
                    TestStarting testStarting => new TestStarting(testInvocationTest),
                    TestSkipped testSkipped => new TestSkipped(testInvocationTest, testSkipped.Reason),
                    TestPassed testPassed => new TestPassed(testInvocationTest, duration, output),
                    TestFailed testFailed => new TestFailed(testInvocationTest, duration, output, testFailed.ExceptionTypes, testFailed.Messages, testFailed.StackTraces, testFailed.ExceptionParentIndices),
                    TestFinished testFinished => new TestFinished(testInvocationTest, duration, output),
                    _ => queuedMessage
                };
                // A false return from the bus signals cancellation; stop immediately.
                if (!MessageBus.QueueMessage(transformedMessage))
                {
                    return(aggregatedResult);
                }
            }
        }
    }while (pendingRestart);
    return(aggregatedResult);
}
// Closing braces of the enclosing type/namespace (their headers are outside this chunk).
}
}
public async Task <RunSummary> RunScenarioAsync()
{
    // Drives one scenario through the xUnit message protocol:
    // TestStarting -> (TestSkipped | TestPassed | TestFailed) -> TestFinished,
    // cancelling the run whenever the message bus reports it should stop.
    var summary = new RunSummary { Total = 1 };
    var capturedOutput = string.Empty;

    if (!MessageBus.QueueMessage(new TestStarting(Test)))
    {
        CancellationTokenSource.Cancel();
    }
    else
    {
        AfterTestStarting();

        if (!string.IsNullOrEmpty(SkipReason))
        {
            // Statically skipped: report the skip and bypass invocation entirely.
            summary.Skipped++;
            if (!MessageBus.QueueMessage(new TestSkipped(Test, SkipReason)))
            {
                CancellationTokenSource.Cancel();
            }
        }
        else
        {
            var scenarioAggregator = new ExceptionAggregator(Aggregator);
            if (!scenarioAggregator.HasExceptions)
            {
                var invocation = await scenarioAggregator.RunAsync(() => InvokeTestAsync(scenarioAggregator));
                summary.Time = invocation.Item1;
                capturedOutput = invocation.Item2;
            }

            var thrown = scenarioAggregator.ToException();
            TestResultMessage resultMessage;
            if (thrown == null)
            {
                resultMessage = new TestPassed(Test, summary.Time, capturedOutput);
            }
            else if (thrown is IgnoreException)
            {
                // An IgnoreException downgrades the failure to a skip.
                resultMessage = new TestSkipped(Test, thrown.Message);
                summary.Skipped++;
            }
            else
            {
                resultMessage = new TestFailed(Test, summary.Time, capturedOutput, thrown);
                summary.Failed++;
            }

            if (!CancellationTokenSource.IsCancellationRequested && !MessageBus.QueueMessage(resultMessage))
            {
                CancellationTokenSource.Cancel();
            }
        }

        // Anything aggregated during teardown surfaces as a TestCleanupFailure.
        Aggregator.Clear();
        BeforeTestFinished();
        if (Aggregator.HasExceptions && !MessageBus.QueueMessage(new TestCleanupFailure(Test, Aggregator.ToException())))
        {
            CancellationTokenSource.Cancel();
        }
    }

    if (!MessageBus.QueueMessage(new TestFinished(Test, summary.Time, capturedOutput)))
    {
        CancellationTokenSource.Cancel();
    }

    return summary;
}
bool OnTest(XmlNode xml)
{
    // Translates one <test> element of an xUnit v1 XML report into result
    // messages (Pass/Fail/Skip plus a TestFinished), forwards them to the
    // message sink, and reports whether the run should continue.
    var keepRunning = true;
    var testCase = FindTestCase(xml.Attributes["type"].Value, xml.Attributes["method"].Value);

    // Time is optional in the report; missing means zero.
    var timeAttribute = xml.Attributes["time"];
    var time = timeAttribute == null ? 0M : Decimal.Parse(timeAttribute.Value, CultureInfo.InvariantCulture);

    // Output is optional as well.
    var outputElement = xml.SelectSingleNode("output");
    var output = outputElement == null ? String.Empty : outputElement.InnerText;

    var displayName = xml.Attributes["name"].Value;

    testCaseResults.Total++;
    testCaseResults.Time += time;

    ITestCaseMessage resultMessage = null;
    switch (xml.Attributes["result"].Value)
    {
        case "Pass":
            resultMessage = new TestPassed(testCase, displayName, time, output);
            break;

        case "Fail":
        {
            testCaseResults.Failed++;
            var failure = xml.SelectSingleNode("failure");
            resultMessage = new TestFailed(testCase, displayName, time, output,
                                           failure.Attributes["exception-type"].Value,
                                           failure.SelectSingleNode("message").InnerText,
                                           failure.SelectSingleNode("stack-trace").InnerText);
            break;
        }

        case "Skip":
            testCaseResults.Skipped++;
            resultMessage = new TestSkipped(testCase, displayName, xml.SelectSingleNode("reason/message").InnerText);
            break;
    }

    if (resultMessage != null)
    {
        keepRunning = messageSink.OnMessage(resultMessage) && keepRunning;
    }

    keepRunning = messageSink.OnMessage(new TestFinished(testCase, displayName, time, output)) && keepRunning;
    return keepRunning && TestClassResults.Continue;
}
public void ConvertsITestPassed()
{
    // Arrange: a listener stub that captures whatever TestResult the visitor
    // hands to TestFinished.
    TestResult observed = null;
    var stubListener = Substitute.For<ITestListener>();
    stubListener.WhenAny(l => l.TestFinished(null))
                .Do<TestResult>(result => observed = result);
    var sut = new ResultVisitor(stubListener);

    // Act: push a TestPassed message through the visitor.
    sut.OnMessage(new TestPassed
    {
        TestCase = new TestCase(typeof(string), "Contains"),
        TestDisplayName = "Display Name",
        ExecutionTime = 123.45M
    });

    // Assert: the converted TestResult mirrors every field of the message.
    Assert.NotNull(observed);
    Assert.Same(typeof(string), observed.FixtureType);
    Assert.Equal("Contains", observed.Method.Name);
    Assert.Equal("Display Name", observed.Name);
    Assert.Equal(TestState.Passed, observed.State);
    Assert.Equal(123.45, observed.TimeSpan.TotalMilliseconds);
    Assert.Equal(1, observed.TotalTests);
}
// Intentionally a no-op: a passed test result requires no processing here.
// NOTE(review): presumably sibling Handle overloads react to failures/skips — confirm.
public void Handle(TestPassed result) { //All good
}
protected override async Task <RunSummary> RunTestAsync()
{
    // Executes one SpecFlow scenario (or scenario-outline example row) as an xUnit
    // test: locates the scenario in the parsed Gherkin document, runs it while
    // measuring time and capturing output, and reports Passed/Failed/Skipped plus
    // an unconditional TestFinished on the message bus.
    var test = new XunitTest(TestCase, TestCase.DisplayName); //TODO: this is a pickle, we could use the Compiler/Pickle interfaces from the Gherkin parser
    var summary = new RunSummary() { Total = 1 };
    string output = "";
    var gherkinDocument = await this.TestCase.FeatureTypeInfo.GetDocumentAsync();
    // Resolve the scenario named by the test case; for outlines, materialize the
    // concrete scenario from the example row identified by ExampleId.
    Scenario scenario = null;
    if (gherkinDocument.SpecFlowFeature != null)
    {
        if (TestCase.IsScenarioOutline)
        {
            var scenarioOutline = gherkinDocument.SpecFlowFeature.ScenarioDefinitions.OfType <ScenarioOutline>().FirstOrDefault(s => s.Name == TestCase.Name);
            if (scenarioOutline != null && SpecFlowParserHelper.GetExampleRowById(scenarioOutline, TestCase.ExampleId, out var example, out var exampleRow))
            {
                scenario = SpecFlowParserHelper.CreateScenario(scenarioOutline, example, exampleRow);
            }
        }
        else
        {
            scenario = gherkinDocument.SpecFlowFeature.ScenarioDefinitions.OfType <Scenario>().FirstOrDefault(s => s.Name == TestCase.Name);
        }
    }
    // Skip when the scenario cannot be found or carries an "ignore" tag
    // (at either feature or scenario level).
    string skipReason = null;
    if (scenario == null)
    {
        skipReason = $"Unable to find Scenario: {TestCase.DisplayName}";
    }
    else if (gherkinDocument.SpecFlowFeature.Tags.GetTags().Concat(scenario.Tags.GetTags()).Contains("ignore"))
    {
        skipReason = "Ignored";
    }
    if (skipReason != null)
    {
        summary.Skipped++;
        if (!MessageBus.QueueMessage(new TestSkipped(test, skipReason)))
        {
            CancellationTokenSource.Cancel();
        }
    }
    else
    {
        var aggregator = new ExceptionAggregator(Aggregator);
        if (!aggregator.HasExceptions)
        {
            aggregator.Run(() =>
            {
                // Time the scenario and capture its output even when it throws:
                // the finally block records both before tearing down the helper.
                var stopwatch = Stopwatch.StartNew();
                testOutputHelper.Initialize(MessageBus, test);
                try
                {
                    RunScenario(gherkinDocument, scenario);
                }
                finally
                {
                    stopwatch.Stop();
                    summary.Time = (decimal)stopwatch.Elapsed.TotalSeconds;
                    output = testOutputHelper.Output;
                    testOutputHelper.Uninitialize();
                }
            }
            );
        }
        // Any aggregated exception turns the run into a failure.
        var exception = aggregator.ToException();
        TestResultMessage testResult;
        if (exception == null)
        {
            testResult = new TestPassed(test, summary.Time, output);
        }
        else
        {
            testResult = new TestFailed(test, summary.Time, output, exception);
            summary.Failed++;
        }
        if (!CancellationTokenSource.IsCancellationRequested)
        {
            if (!MessageBus.QueueMessage(testResult))
            {
                CancellationTokenSource.Cancel();
            }
        }
    }
    // TestFinished is reported unconditionally, mirroring xUnit's runner protocol.
    if (!MessageBus.QueueMessage(new TestFinished(test, summary.Time, output)))
    {
        CancellationTokenSource.Cancel();
    }
    return(summary);
}
protected override async Task <RunSummary> RunTestAsync()
{
    // Executes one SpecFlow scenario (or scenario-outline example row) as an xUnit
    // test, parsing the feature file directly and accumulating scenario output in
    // a StringBuilder passed down to RunScenario.
    var test = new XunitTest(TestCase, TestCase.DisplayName); //TODO: this is a pickle, we could use the Compiler/Pickle interfaces from the Gherkin parser
    var summary = new RunSummary() { Total = 1 };
    var output = new StringBuilder();
    var gherkinDocument = await SpecFlowParserHelper.ParseSpecFlowDocumentAsync(TestCase.FeatureFile.FeatureFilePath);
    // Resolve the scenario named by the test case; for outlines, materialize the
    // concrete scenario from the example row identified by ExampleId.
    Scenario scenario = null;
    if (gherkinDocument.SpecFlowFeature != null)
    {
        if (TestCase.IsScenarioOutline)
        {
            var scenarioOutline = gherkinDocument.SpecFlowFeature.ScenarioDefinitions.OfType <ScenarioOutline>().FirstOrDefault(s => s.Name == TestCase.Name);
            Examples example = null;
            Gherkin.Ast.TableRow exampleRow = null;
            if (scenarioOutline != null && SpecFlowParserHelper.GetExampleRowById(scenarioOutline, TestCase.ExampleId, out example, out exampleRow))
            {
                scenario = SpecFlowParserHelper.CreateScenario(scenarioOutline, example, exampleRow);
            }
        }
        else
        {
            scenario = gherkinDocument.SpecFlowFeature.ScenarioDefinitions.OfType <Scenario>().FirstOrDefault(s => s.Name == TestCase.Name);
        }
    }
    // Skip when the scenario cannot be found or carries an "ignore" tag
    // (at either feature or scenario level).
    string skipReason = null;
    if (scenario == null)
    {
        skipReason = $"Unable to find Scenario: {TestCase.DisplayName}";
    }
    else if (gherkinDocument.SpecFlowFeature.Tags.GetTags().Concat(scenario.Tags.GetTags()).Contains("ignore"))
    {
        skipReason = "Ignored";
    }
    if (skipReason != null)
    {
        summary.Skipped++;
        if (!MessageBus.QueueMessage(new TestSkipped(test, skipReason)))
        {
            CancellationTokenSource.Cancel();
        }
    }
    else
    {
        var aggregator = new ExceptionAggregator(Aggregator);
        if (!aggregator.HasExceptions)
        {
            aggregator.Run(() => RunScenario(gherkinDocument, scenario, output));
        }
        // NOTE(review): summary.Time is never assigned in this variant (no stopwatch,
        // unlike the sibling runner), so results always report 0 elapsed time — confirm intended.
        var exception = aggregator.ToException();
        TestResultMessage testResult;
        if (exception == null)
        {
            testResult = new TestPassed(test, summary.Time, output.ToString());
        }
        else
        {
            testResult = new TestFailed(test, summary.Time, output.ToString(), exception);
            summary.Failed++;
        }
        if (!CancellationTokenSource.IsCancellationRequested)
        {
            if (!MessageBus.QueueMessage(testResult))
            {
                CancellationTokenSource.Cancel();
            }
        }
    }
    // TestFinished is reported unconditionally, mirroring xUnit's runner protocol.
    if (!MessageBus.QueueMessage(new TestFinished(test, summary.Time, output.ToString())))
    {
        CancellationTokenSource.Cancel();
    }
    return(summary);
}
protected override async Task <RunSummary> RunTestAsync()
{
    // Builds a composable catalog with each V3 discovery module over the same
    // parts/assemblies, asserts the resulting catalogs are equivalent (by string
    // representation and by Distinct()), then creates and stores one
    // CompositionConfiguration per distinct catalog for the follow-up engine test.
    var output = new TestOutputHelper();
    output.Initialize(this.MessageBus, new XunitTest(this.TestCase, this.DisplayName));
    await this.Aggregator.RunAsync(() => this.Timer.AggregateAsync(
        async delegate
        {
            var v3DiscoveryModules = this.GetV3DiscoveryModules();
            var resultingCatalogs = new List <ComposableCatalog>(v3DiscoveryModules.Count);
            var assemblies = this.assemblyNames.Select(an => Assembly.Load(new AssemblyName(an))).ToList();
            // One catalog per discovery module, all built from identical inputs.
            foreach (var discoveryModule in v3DiscoveryModules)
            {
                var partsFromTypes = await discoveryModule.CreatePartsAsync(this.parts);
                var partsFromAssemblies = await discoveryModule.CreatePartsAsync(assemblies);
                var catalog = TestUtilities.EmptyCatalog
                    .AddParts(partsFromTypes)
                    .AddParts(partsFromAssemblies);
                resultingCatalogs.Add(catalog);
            }
            string[] catalogStringRepresentations = resultingCatalogs.Select(catalog =>
            {
                var writer = new StringWriter();
                catalog.ToString(writer);
                return(writer.ToString());
            }).ToArray();
            bool anyStringRepresentationDifferences = false;
            // NOTE(review): the flag is overwritten on every iteration, so only the
            // last module's comparison outcome survives — looks like |= was intended; confirm.
            for (int i = 1; i < resultingCatalogs.Count; i++)
            {
                anyStringRepresentationDifferences = PrintDiff(
                    v3DiscoveryModules[0].GetType().Name,
                    v3DiscoveryModules[i].GetType().Name,
                    catalogStringRepresentations[0],
                    catalogStringRepresentations[i],
                    output);
            }

            // Verify that the catalogs are identical.
            // The string compare above should have taken care of this (in a more descriptive way),
            // but we do this to double-check.
            var uniqueCatalogs = resultingCatalogs.Distinct().ToArray();

            // Fail the test if ComposableCatalog.Equals returns a different result from string comparison.
            Assert.Equal(anyStringRepresentationDifferences, uniqueCatalogs.Length > 1);
            if (uniqueCatalogs.Length == 1)
            {
                ////output.WriteLine(catalogStringRepresentations[0]);
            }

            // For each distinct catalog, create one configuration and verify it meets expectations.
            var configurations = new List <CompositionConfiguration>(uniqueCatalogs.Length);
            foreach (var uniqueCatalog in uniqueCatalogs)
            {
                // On desktop builds the catalog additionally gets a composition service.
                var catalogWithSupport = uniqueCatalog
#if DESKTOP
                    .WithCompositionService()
#endif
                ;

                // Round-trip the catalog through serialization to verify that as well.
                await RoundtripCatalogSerializationAsync(catalogWithSupport, output);
                var configuration = CompositionConfiguration.Create(catalogWithSupport);
                if (!this.compositionVersions.HasFlag(CompositionEngines.V3AllowConfigurationWithErrors))
                {
                    Assert.Equal(this.expectInvalidConfiguration, !configuration.CompositionErrors.IsEmpty || !catalogWithSupport.DiscoveredParts.DiscoveryErrors.IsEmpty);
                }

                // Save the configuration in a property so that the engine test that follows can reuse the work we've done.
                configurations.Add(configuration);
            }
            this.ResultingConfigurations = configurations;
        }));
    // Report the aggregate outcome: any aggregated exception fails the test.
    var test = new XunitTest(this.TestCase, this.DisplayName);
    var runSummary = new RunSummary { Total = 1, Time = this.Timer.Total };
    IMessageSinkMessage testResultMessage;
    if (this.Aggregator.HasExceptions)
    {
        testResultMessage = new TestFailed(test, this.Timer.Total, output.Output, this.Aggregator.ToException());
        runSummary.Failed++;
    }
    else
    {
        testResultMessage = new TestPassed(test, this.Timer.Total, output.Output);
        this.Passed = true;
    }
    if (!this.MessageBus.QueueMessage(testResultMessage))
    {
        this.CancellationTokenSource.Cancel();
    }
    this.Aggregator.Clear();
    return(runSummary);
}