// Drives a complete nested lifecycle (context -> behavior -> specification)
// through the listener and verifies the sink observed exactly one start and
// one successful finish per node, unwound in reverse (innermost-first) order.
public void CanStartAndEndBehaviorSpecification()
{
    var taskDepot = new RemoteTaskDepot(new RemoteTask[]
    {
        RemoteTaskFixtures.Context,
        RemoteTaskFixtures.Behavior1,
        RemoteTaskFixtures.Behavior1Specification1
    });
    var elementCache = new ElementCache(new ISpecificationElement[] { ElementFixtures.Behavior1Specification1 });
    var executionSink = Substitute.For<ITestExecutionSink>();
    var runContext = new RunContext(taskDepot, executionSink);
    var subject = new TestExecutionListener(runContext, elementCache, CancellationToken.None);

    // Open context, behavior and spec; close them innermost-first.
    subject.OnContextStart(ElementFixtures.Context);
    subject.OnBehaviorStart(ElementFixtures.Behavior1);
    subject.OnSpecificationStart(ElementFixtures.Behavior1Specification1);
    subject.OnSpecificationEnd(ElementFixtures.Behavior1Specification1, string.Empty, new TestRunResult(TestStatus.Passing));
    subject.OnBehaviorEnd(ElementFixtures.Behavior1, string.Empty);
    subject.OnContextEnd(ElementFixtures.Context, string.Empty);

    // Every remote task starts once and finishes once with Success.
    executionSink.Received(1).TestStarting(RemoteTaskFixtures.Context);
    executionSink.Received(1).TestStarting(RemoteTaskFixtures.Behavior1);
    executionSink.Received(1).TestStarting(RemoteTaskFixtures.Behavior1Specification1);
    executionSink.Received(1).TestFinished(RemoteTaskFixtures.Behavior1Specification1, Arg.Any<string>(), TestResult.Success);
    executionSink.Received(1).TestFinished(RemoteTaskFixtures.Behavior1, Arg.Any<string>(), TestResult.Success);
    executionSink.Received(1).TestFinished(RemoteTaskFixtures.Context, Arg.Any<string>(), TestResult.Success);
}
// Runs the base fixture teardown, then unconditionally clears the
// TestExecutionListener's static state (finally guarantees the reset
// happens even if base.TearDown throws), so one test run cannot leak
// listener state into the next.
public override void TearDown() { try { base.TearDown(); } finally { TestExecutionListener.Reset(); } }
// Machine-converted from Java; original signature was
// "@Override protected void tearDown() throws Exception" — .NET has no
// 'throws' clause, so the checked-exception declaration was dropped.
// Runs the base teardown, then always resets the listener's static
// state so it cannot leak into the next test.
// NOTE(review): camelCase member names are kept from the Java original;
// renaming would break the converted base class's override contract.
protected internal override void tearDown() { try { base.tearDown(); } finally { TestExecutionListener.reset(); } }
// Wires console label/output handlers onto the listener according to the
// --labels option. Mode "ALL" writes a label as each test starts; mode "ON"
// (the default when the option is absent) writes a label only when a test
// actually produced output. Note the modes are intentionally asymmetric
// across the four events, matching the original behavior exactly.
void SetupLabelOutput(TestExecutionListener listener)
{
    var labelMode = _options.DisplayTestLabels != null ? _options.DisplayTestLabels.ToUpperInvariant() : "ON";
    var labelAll = labelMode == "ALL";
    var labelOn = labelMode == "ON";

    // "ALL": announce every test as it begins.
    listener.TestStarted += (sender, args) =>
    {
        if (labelAll)
        {
            WriteLabelLine(args.TestName);
        }
    };

    // Test output is only echoed when present; "ON" prefixes it with a label.
    listener.TestFinished += (sender, args) =>
    {
        if (args.TestOutput == null)
        {
            return;
        }
        if (labelOn)
        {
            WriteLabelLine(args.TestName);
        }
        WriteOutputLine(args.TestOutput);
    };

    // Suite output gets a label in both "ON" and "ALL" modes.
    listener.SuiteFinished += (sender, args) =>
    {
        if (args.TestOutput == null)
        {
            return;
        }
        if (labelOn || labelAll)
        {
            WriteLabelLine(args.TestName);
        }
        WriteOutputLine(args.TestOutput);
    };

    // Live output: label in "ON" mode when a test name is available;
    // error-stream output is colored as an error.
    listener.TestOutput += (sender, args) =>
    {
        if (labelOn && args.TestName != null)
        {
            WriteLabelLine(args.TestName);
        }
        var style = args.Stream == "Error" ? ColorStyle.Error : ColorStyle.Output;
        WriteOutputLine(args.TestOutput, style);
    };
}
// Opens and closes a lone context through the listener and verifies the sink
// received exactly one start notification and one successful finish for it.
public void CanStartAndEndContext()
{
    var taskDepot = new RemoteTaskDepot(new RemoteTask[] { RemoteTaskFixtures.Context });
    var elementCache = new ElementCache(Array.Empty<ISpecificationElement>());
    var executionSink = Substitute.For<ITestExecutionSink>();
    var runContext = new RunContext(taskDepot, executionSink);
    var subject = new TestExecutionListener(runContext, elementCache, CancellationToken.None);

    subject.OnContextStart(ElementFixtures.Context);
    subject.OnContextEnd(ElementFixtures.Context, string.Empty);

    executionSink.Received(1).TestStarting(RemoteTaskFixtures.Context);
    executionSink.Received(1).TestFinished(RemoteTaskFixtures.Context, Arg.Any<string>(), TestResult.Success);
}
/// <summary>
/// Main run loop of the console runner: loads each input assembly with its
/// side-by-side nunit.framework.dll, then either explores (--list/--explore)
/// or executes the tests, accumulating results into a summary.
/// </summary>
/// <returns>
/// A <c>ReturnCodes</c> value: OK for explore/design-time runs, an error code
/// for unexpected errors / invalid assemblies / invalid fixtures, otherwise
/// the number of failed tests.
/// </returns>
int Execute()
{
    DisplayRuntimeEnvironment();
    DisplayTestFiles();
    IEnumerable<string> testList = SetupSinks();
    IDictionary<string, object> settings = GetTestSettings();

    // We display the filters at this point so that any exception message
    // thrown by CreateTestFilter will be understandable.
    DisplayTestFilters();

    // Apply filters and merge with testList
    var filter = CreateTestFilter(testList);
    var summary = new ResultSummary();

    // Load the test framework for each input assembly. The framework DLL is
    // assumed to sit next to the test assembly — TODO confirm that layout.
    foreach (var assembly in _options.InputFiles)
    {
        // TODO: Load async
        var assemblyPath = System.IO.Path.GetFullPath(assembly);
        var testAssembly = LoadAssembly(assemblyPath);
        var frameworkPath = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(assemblyPath), "nunit.framework.dll");
        var framework = LoadAssembly(frameworkPath);
        var driver = new NUnitPortableDriver();
        // Load result was previously captured into an unused local; the
        // return value is not consumed anywhere in this method.
        driver.Load(framework, testAssembly, settings);

        // TODO: Run async
        // Explore or Run
        if (_options.List || _options.Explore)
        {
            ITestListener listener = new TestExploreListener(_testDiscoverySink, _options, assemblyPath);
            string xml = driver.Explore(filter.Text);
            listener.OnTestEvent(xml);
            summary.AddResult(xml);
        }
        else
        {
            var tcListener = new TeamCityEventListener();
            TestExecutionListener listener = new TestExecutionListener(_testExecutionSink, _options, assemblyPath);
            SetupLabelOutput(listener);
            // Forward every driver report to the execution listener, and to
            // TeamCity service-message output when --teamcity is set.
            string xml = driver.Run(
                report =>
                {
                    listener.OnTestEvent(report);
                    if (_options.TeamCity)
                    {
                        tcListener.OnTestEvent(report);
                    }
                },
                filter.Text);
            summary.AddResult(xml);
        }
    }

    // Explore/list mode produces no pass/fail summary — report completion
    // to the design-time sink if present and exit cleanly.
    if (_options.List || _options.Explore)
    {
        if (_options.DesignTime)
        {
            _testDiscoverySink.SendTestCompleted();
        }
        return (ReturnCodes.OK);
    }

    // Design-time execution: the IDE consumes results via the sink, so the
    // console summary and TestResult.xml are skipped.
    if (_options.DesignTime)
    {
        _testExecutionSink.SendTestCompleted();
        return (ReturnCodes.OK);
    }

    // Summarize and save test results
    var reporter = new ResultReporter(summary, ColorConsole, _options);
    reporter.ReportResults();

    // Save out the TestResult.xml
    SaveTestResults(reporter.TestResults);

    // Error conditions take precedence over the failure count.
    if (summary.UnexpectedError)
    {
        return (ReturnCodes.UNEXPECTED_ERROR);
    }
    if (summary.InvalidAssemblies > 0)
    {
        return (ReturnCodes.INVALID_ASSEMBLY);
    }
    if (summary.InvalidTestFixtures > 0)
    {
        return (ReturnCodes.INVALID_TEST_FIXTURE);
    }

    // Return the number of test failures
    return (summary.FailedCount);
}
// One-time fixture setup: builds the mock sink and a listener wired to it
// with design-time command-line options and a fixed source path.
public void OneTimeSetUp()
{
    var mockSink = new Mocks.MockTestExecutionSink();
    _sink = mockSink;
    _listener = new TestExecutionListener(mockSink, new CommandLineOptions("--designtime"), @"\src");
}