/// <summary>
/// Load a TestPackage for possible execution
/// </summary>
/// <returns>A TestEngineResult.</returns>
protected override TestEngineResult LoadPackage()
{
    // Give the engine one final opportunity to reject invalid settings
    // that the client runner did not catch.
    ValidatePackageSettings();

    // Any project files at the top level are expanded so that they
    // contain one subproject per contained assembly.
    ExpandProjects();

    // Called purely for its side effects: it leaves per-assembly info
    // behind in the package, used later to decide how to run each assembly.
    _runtimeService.SelectRuntimeFramework(TestPackage);

    // Running in-process as x86 is impossible from a 64-bit process.
    bool is64BitProcess = IntPtr.Size == 8;
    if (is64BitProcess
        && TestPackage.GetSetting(PackageSettings.ProcessModel, "") == "InProcess"
        && TestPackage.GetSetting(PackageSettings.RunAsX86, false))
    {
        throw new NUnitEngineException("Cannot run tests in process - a 32 bit process is required.");
    }

    _realRunner = TestRunnerFactory.MakeTestRunner(TestPackage);

    TestEngineResult loadResult = _realRunner.Load();
    return loadResult.Aggregate(TEST_RUN_ELEMENT, TestPackage.Name, TestPackage.FullName);
}
/// <summary>
/// Load a TestPackage for possible execution
/// </summary>
/// <returns>A TestEngineResult.</returns>
protected override TestEngineResult LoadPackage()
{
    // Final validation pass over the package settings, in case the
    // client runner let an invalid value through.
    ValidatePackageSettings();

    _realRunner = TestRunnerFactory.MakeTestRunner(TestPackage);

    TestEngineResult loadResult = _realRunner.Load();
    return loadResult.Aggregate(TEST_RUN_ELEMENT, TestPackage.Name, TestPackage.FullName);
}
/// <summary>
/// Creates a new connection to the target database, allowing a test to communicate with other
/// databases. The returned <see cref="ITestRunner"/> must be disposed, so it is recommended to
/// wrap it in a using statement
/// </summary>
/// <param name="connectionString">The connection string of the database to connect to</param>
/// <returns>Returns the <see cref="ITestRunner"/> for the new database</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="connectionString"/> is null</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="connectionString"/> is empty or whitespace</exception>
protected async Task<ITestRunner> NewConnectionByConnectionStringAsync(string connectionString)
{
    // Use the most specific standard exception for each failure mode (CA2208):
    // ArgumentNullException only for an actual null argument; ArgumentException
    // for a non-null but unusable (empty/whitespace) value. ArgumentNullException
    // derives from ArgumentException, so callers catching the base still work.
    if (connectionString == null)
    {
        throw new ArgumentNullException(nameof(connectionString));
    }
    if (string.IsNullOrWhiteSpace(connectionString))
    {
        throw new ArgumentException("Connection string must not be empty or whitespace.", nameof(connectionString));
    }

    ITestRunner testRunner = TestRunnerFactory.BuildTestRunner(connectionString);
    await testRunner.InitialiseAsync(TestFramework);
    return testRunner;
}
/// <summary>
/// Prepares the run, then executes every referenced suite against a
/// single test runner instance.
/// </summary>
public override void Execute()
{
    Prepare();

    // The runner owns writer/session resources, so scope it with using.
    using (ITestRunner runner = TestRunnerFactory.Create(this))
    {
        runner.Writer.Write(this);

        foreach (TestSuiteRef reference in this.Suites)
        {
            IExecutable suite = new App.Common.UITest.Suite.TestSuite(reference, this, runner);
            suite.Execute();
        }
    }
}
/// <summary>
/// Entry point. Expects: args[0] = runner identifier, args[1] = test target,
/// args[2] = timeout (parsed as a TimeSpan), args[3] = optional extra argument.
/// Writes the serialized result to stdout and exits 0 on success, -1 on failure.
/// </summary>
static void Main(string[] args)
{
    var testRunnerFactory = new TestRunnerFactory();
    var runner = testRunnerFactory.CreateTestRunner(
        args[0],
        TimeSpan.Parse(args[2]),
        args.Length > 3 ? args[3] : null);

    try
    {
        var result = runner.RunTestsAsync(args[1]).Result;
        System.Console.WriteLine(JsonConvert.SerializeObject(result));
        Environment.Exit(0);
    }
    catch (Exception ex)
    {
        // BUG FIX: "ex.Message" was outside the interpolation braces, so the
        // literal text "ex.Message" was printed instead of the actual message.
        System.Console.WriteLine($"Message: {ex.Message} || Inner exception message: {ex.InnerException?.Message}");
        Environment.Exit(-1);
    }
}
/// <summary>
/// Async entry point: parses CLI options, wires up the test runner's
/// collaborators, and drives its main loop. Returns the process exit code.
/// </summary>
internal static async Task<int> Main(string[] args)
{
    var options = CliParser.Parse(args);

    // A null result means the arguments could not be parsed; exit with failure.
    if (options == null)
    {
        return 1;
    }

    var runner = TestRunnerFactory.Create(
        AnsiTerminalFactory.Create(),
        ProcessRunnerFactory.Create(),
        LocalTestHistoryFactory.Create(),
        options);

    await runner.MainLoop();
    return 0;
}
/// <summary>
/// Load a TestPackage for possible execution
/// </summary>
/// <returns>A TestEngineResult.</returns>
protected override TestEngineResult LoadPackage()
{
    // Final chance to reject invalid package settings that the
    // client runner let through.
    ValidatePackageSettings();

    // Any project files at the top level are expanded into
    // subprojects, one per contained assembly.
    ExpandProjects();

    // Invoked for its side effects only: it records per-assembly info
    // in the package, consumed later when deciding how to run each one.
    RuntimeService.SelectRuntimeFramework(TestPackage);

    _realRunner = TestRunnerFactory.MakeTestRunner(TestPackage);

    TestEngineResult loadResult = _realRunner.Load();
    return loadResult.Aggregate(TEST_RUN_ELEMENT, TestPackage.Name, TestPackage.FullName);
}
/// <summary>
/// Runs a job whose driver performs one passing assert and verifies
/// the aggregated result reports exactly that single success.
/// </summary>
public void TestTestPassing()
{
    ITestRunner runner = TestRunnerFactory.NewTestRunner();
    // The TestRunner cannot be null.
    Xunit.Assert.NotNull(runner);

    // Build and submit the job, then inspect the aggregated result.
    ITestResult result = runner.RunTest(
        runner.NewJobRequestBuilder()
            .AddDriverConfiguration(TestPassingStartHandler.GetDriverConfiguration())
            .AddGlobalAssemblyForType(typeof(TestPassingStartHandler))
            .SetJobIdentifier("TestPassingTest"));

    // The TestResult cannot be null.
    Xunit.Assert.NotNull(result);

    // Every assert in the job must have passed, and exactly one did.
    Xunit.Assert.True(result.AllTestsSucceeded, result.FailedTestMessage);
    Xunit.Assert.Equal(1, result.NumberOfPassedAsserts);
}
/// <summary>
/// Runs a job whose driver performs one deliberately failing assert and
/// verifies the aggregated result reports exactly that single failure.
/// </summary>
public void TestTestFailure()
{
    ITestRunner runner = TestRunnerFactory.NewTestRunner();
    // The TestRunner cannot be null.
    Xunit.Assert.NotNull(runner);

    // Build and submit the job, then inspect the aggregated result.
    ITestResult result = runner.RunTest(
        runner.NewJobRequestBuilder()
            .AddDriverConfiguration(TestFailingStartHandler.GetDriverConfiguration())
            .AddGlobalAssemblyForType(typeof(TestFailingStartHandler))
            .SetJobIdentifier("TestFailingTest"));

    // The TestResult cannot be null.
    Xunit.Assert.NotNull(result);

    // The run must NOT have succeeded, and only the one expected assert failed.
    Xunit.Assert.False(result.AllTestsSucceeded, result.FailedTestMessage);
    Xunit.Assert.Equal(1, result.NumberOfFailedAsserts);
}
/// <summary>
/// Resolves the project under test, performs the initial (unmutated) build,
/// and assembles the input object for the mutation test run.
/// </summary>
/// <param name="options">The Stryker options driving this run</param>
/// <returns>The fully populated <see cref="MutationTestInput"/></returns>
public MutationTestInput Initialize(StrykerOptions options)
{
    // Locate the project / test-project pair described by the options.
    var projectInfo = _inputFileResolver.ResolveInput(options);

    // Build once before mutating so later runs have binaries to work against.
    _initialBuildProcess.InitialBuild(
        projectInfo.FullFramework,
        options.BasePath,
        options.SolutionPath,
        Path.GetFileName(projectInfo.TestProjectAnalyzerResult.ProjectFilePath));

    // Lazily create the runner; an already-injected runner is reused.
    if (_testRunner == null)
    {
        _testRunner = new TestRunnerFactory().Create(options, options.Optimizations, projectInfo);
    }

    return new MutationTestInput()
    {
        ProjectInfo = projectInfo,
        AssemblyReferences = _assemblyReferenceResolver
            .LoadProjectReferences(projectInfo.ProjectUnderTestAnalyzerResult.References)
            .ToList(),
        TestRunner = _testRunner
    };
}
/// <summary>
/// Factory hook: creates the engine runner for the given package.
/// Subclasses may override to supply a different runner.
/// </summary>
/// <param name="package">The package the runner will execute</param>
/// <returns>The runner produced by the factory</returns>
protected virtual ITestEngineRunner CreateRunner(TestPackage package) => TestRunnerFactory.MakeTestRunner(package);
// Runs the tests described by the console options and returns the process
// exit code: the count of errors/failures on a completed run, or a sentinel
// (FIXTURE_NOT_FOUND / UNEXPECTED_ERROR) on failure.
public int Execute(ConsoleOptions options)
{
    // Optionally redirect standard output to the file named by the output option.
    TextWriter outWriter = Console.Out;
    bool redirectOutput = options.output != null && options.output != string.Empty;
    if (redirectOutput)
    {
        StreamWriter outStreamWriter = new StreamWriter(options.output);
        outStreamWriter.AutoFlush = true;
        outWriter = outStreamWriter;
    }

    // Optionally redirect standard error to the file named by the err option.
    TextWriter errorWriter = Console.Error;
    bool redirectError = options.err != null && options.err != string.Empty;
    if (redirectError)
    {
        StreamWriter errorStreamWriter = new StreamWriter(options.err);
        errorStreamWriter.AutoFlush = true;
        errorWriter = errorStreamWriter;
    }

    // Build the package and report the effective execution settings,
    // falling back to "Default" for any setting not explicitly present.
    TestPackage package = MakeTestPackage(options);
    Console.WriteLine("ProcessModel: {0} DomainUsage: {1}", package.Settings.Contains("ProcessModel") ? package.Settings["ProcessModel"] : "Default", package.Settings.Contains("DomainUsage") ? package.Settings["DomainUsage"] : "Default");
    Console.WriteLine("Execution Runtime: {0}", package.Settings.Contains("RuntimeFramework") ? package.Settings["RuntimeFramework"] : "Default");

    TestRunner testRunner = TestRunnerFactory.MakeTestRunner(package);
    testRunner.Load(package);

    try
    {
        // A null Test after Load means the requested fixture was not found.
        if (testRunner.Test == null)
        {
            testRunner.Unload();
            Console.Error.WriteLine("Unable to locate fixture {0}", options.fixture);
            return(FIXTURE_NOT_FOUND);
        }

        EventCollector collector = new EventCollector(options, outWriter, errorWriter);

        // Compose the test filter from the run, include and exclude options.
        TestFilter testFilter = TestFilter.Empty;
        if (options.run != null && options.run != string.Empty)
        {
            Console.WriteLine("Selected test: " + options.run);
            testFilter = new SimpleNameFilter(options.run);
        }

        if (options.include != null && options.include != string.Empty)
        {
            Console.WriteLine("Included categories: " + options.include);
            TestFilter includeFilter = new CategoryExpression(options.include).Filter;
            if (testFilter.IsEmpty)
            {
                testFilter = includeFilter;
            }
            else
            {
                testFilter = new AndFilter(testFilter, includeFilter);
            }
        }

        if (options.exclude != null && options.exclude != string.Empty)
        {
            Console.WriteLine("Excluded categories: " + options.exclude);
            TestFilter excludeFilter = new NotFilter(new CategoryExpression(options.exclude).Filter);
            if (testFilter.IsEmpty)
            {
                testFilter = excludeFilter;
            }
            else if (testFilter is AndFilter)
            {
                // Fold the exclusion into the existing AndFilter rather than nesting.
                ((AndFilter)testFilter).Add(excludeFilter);
            }
            else
            {
                testFilter = new AndFilter(testFilter, excludeFilter);
            }
        }

        TestResult result = null;
        // Snapshot console/directory state so it can be restored after the run.
        string savedDirectory = Environment.CurrentDirectory;
        TextWriter savedOut = Console.Out;
        TextWriter savedError = Console.Error;

        try
        {
            result = testRunner.Run(collector, testFilter);
        }
        finally
        {
            // Flush (and close, when redirected to files) the writers and
            // restore console state and working directory, even if the run threw.
            outWriter.Flush();
            errorWriter.Flush();
            if (redirectOutput)
            {
                outWriter.Close();
            }
            if (redirectError)
            {
                errorWriter.Close();
            }
            Environment.CurrentDirectory = savedDirectory;
            Console.SetOut(savedOut);
            Console.SetError(savedError);
        }

        Console.WriteLine();

        int returnCode = UNEXPECTED_ERROR;
        if (result != null)
        {
            string xmlOutput = CreateXmlOutput(result);
            ResultSummarizer summary = new ResultSummarizer(result);
            if (options.xmlConsole)
            {
                // xmlConsole mode: dump the raw XML instead of the summary reports.
                Console.WriteLine(xmlOutput);
            }
            else
            {
                WriteSummaryReport(summary);
                if (summary.ErrorsAndFailures > 0)
                {
                    WriteErrorsAndFailuresReport(result);
                }
                if (summary.TestsNotRun > 0)
                {
                    WriteNotRunReport(result);
                }
            }

            // Write xml output here; default file name when no xml option given.
            string xmlResultFile = options.xml == null || options.xml == string.Empty ? "TestResult.xml" : options.xml;
            using (StreamWriter writer = new StreamWriter(xmlResultFile))
            {
                writer.Write(xmlOutput);
            }

            // Exit code mirrors the number of errors and failures.
            returnCode = summary.ErrorsAndFailures;
        }

        // Any exceptions captured by the collector override the exit code.
        if (collector.HasExceptions)
        {
            collector.WriteExceptions();
            returnCode = UNEXPECTED_ERROR;
        }

        return(returnCode);
    }
    finally
    {
        testRunner.Unload();
    }
}