public override IEnumerable<TestRun> BatchTests(IEnumerable<VSTestCase> tests)
{
    BoostTestRunnerSettings adaptedSettings = this.Settings.TestRunnerSettings.Clone();
    // Disable timeout since this batching strategy executes more than one test at a time
    adaptedSettings.Timeout = -1;

    // Group by source
    IEnumerable<IGrouping<string, VSTestCase>> sources = tests.GroupBy(test => test.Source);
    foreach (IGrouping<string, VSTestCase> source in sources)
    {
        IBoostTestRunner runner = GetTestRunner(source.Key);
        if (runner == null)
        {
            continue;
        }

        // Group by test suite
        var suiteGroups = source.GroupBy(test => test.Traits.First(trait => (trait.Name == VSTestModel.TestSuiteTrait)).Value);
        foreach (var suiteGroup in suiteGroups)
        {
            BoostTestRunnerCommandLineArgs args = BuildCommandLineArgs(source.Key);
            foreach (VSTestCase test in suiteGroup)
            {
                // List all tests by display name, but ensure that the first test is fully
                // qualified so that the remaining tests are resolved relative to its test suite
                args.Tests.Add((args.Tests.Count == 0) ? test.FullyQualifiedName : test.DisplayName);
            }

            yield return new TestRun(runner, suiteGroup, args, adaptedSettings);
        }
    }
}
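// Illustration only: the "first fully qualified, rest relative" convention above relies on
// Boost.Test's '--run_test' grammar, in which comma-separated names following a qualified
// test path are resolved within that test's suite. For a suite batch of, say (hypothetical
// names), master_suite/io/parse_empty plus parse_valid, the serialised argument is expected
// to take the compact form:
//
//   --run_test=master_suite/io/parse_empty,parse_valid
//
// A minimal sketch of that serialisation, using a hypothetical helper rather than the
// adapter's own BoostTestRunnerCommandLineArgs implementation:
static string ToRunTestArgument(IEnumerable<string> tests)
{
    // First entry is fully qualified; subsequent entries are plain test names
    return "--run_test=" + string.Join(",", tests);
}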
/// <summary>
/// Identifies the version (if possible) of the Boost.Test module
/// </summary>
/// <param name="runner">The Boost.Test module</param>
/// <returns>The Boost version of the Boost.Test module, or the empty string if the version cannot be retrieved</returns>
private static string GetVersion(IBoostTestRunner runner)
{
    if (!runner.VersionSupported)
    {
        return string.Empty;
    }

    using (TemporaryFile output = new TemporaryFile(TestPathGenerator.Generate(runner.Source, ".version.stderr.log")))
    {
        BoostTestRunnerSettings settings = new BoostTestRunnerSettings();
        BoostTestRunnerCommandLineArgs args = new BoostTestRunnerCommandLineArgs()
        {
            Version = true,
            StandardErrorFile = output.Path
        };

        int resultCode = EXIT_SUCCESS;
        using (var context = new DefaultProcessExecutionContext())
        {
            resultCode = runner.Execute(args, settings, context);
        }

        if (resultCode != EXIT_SUCCESS)
        {
            Logger.Error("--version for {0} failed with exit code {1}. Skipping.", runner.Source, resultCode);
            return string.Empty;
        }

        // '--version' output is emitted on standard error
        var info = File.ReadAllText(args.StandardErrorFile, System.Text.Encoding.ASCII);
        var match = _versionPattern.Match(info);
        return match.Success ? match.Groups[1].Value : string.Empty;
    }
}
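// _versionPattern is not shown in this excerpt. A plausible sketch, assuming only that the
// module's '--version' output contains a dotted version number which GetVersion extracts
// via match.Groups[1] (requires 'using System.Text.RegularExpressions;'):
private static readonly Regex _versionPattern = new Regex(@"(\d+\.\d+\.\d+)");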
public override IEnumerable<TestRun> BatchTests(IEnumerable<VSTestCase> tests)
{
    BoostTestRunnerSettings adaptedSettings = this.Settings.TestRunnerSettings.Clone();
    // Disable timeout since this batching strategy executes more than one test at a time
    adaptedSettings.Timeout = -1;

    // Group by source
    IEnumerable<IGrouping<string, VSTestCase>> sources = tests.GroupBy(test => test.Source);
    foreach (var source in sources)
    {
        IBoostTestRunner runner = GetTestRunner(source.Key);
        if (runner == null)
        {
            continue;
        }

        // Start by batching tests by test suite
        var batch = _fallBackStrategy.BatchTests(source);

        // If the Boost.Test module supports test run filters...
        // (this GetVersion overload maps a VSTestCase to a comparable version and is not shown in this excerpt)
        if (source.Select(GetVersion).All(version => (version >= _minimumVersion)))
        {
            BoostTestRunnerCommandLineArgs args = BuildCommandLineArgs(source.Key);

            // Generate the filter set
            var filterSet = new List<TestFilter>();

            foreach (var run in batch)
            {
                TestFilter filter = TestFilter.EnableFilter();
                // Use the command-line representation of the test suite batch to allow
                // for the most compact representation (i.e. fully/qualified/test_name_0,test_name_1,test_name_2)
                filter.TestSet = new PathTestSet(run.Arguments.Tests);
                filterSet.Add(filter);
            }

            // Use the environment variable rather than the command-line '--run_test' argument
            // to make proper use of test run filters
            args.Environment["BOOST_TEST_RUN_FILTERS"] = string.Join(":", filterSet);

            yield return new TestRun(runner, source, args, adaptedSettings);
        }
        // Else, fall back to the regular test suite batching behaviour...
        else
        {
            foreach (var run in batch)
            {
                yield return run;
            }
        }
    }
}
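// Illustration only: with two suite batches (hypothetical names), the environment variable
// set above is expected to carry a colon-separated list of Boost.Test run filters, e.g.:
//
//   BOOST_TEST_RUN_FILTERS=suite_a/test_1,test_2:suite_b/test_3
//
// where each segment is one TestFilter's string representation. A minimal sketch of
// composing such a value by hand, independent of the adapter's TestFilter type:
var filters = new List<string> { "suite_a/test_1,test_2", "suite_b/test_3" };
Environment.SetEnvironmentVariable("BOOST_TEST_RUN_FILTERS", string.Join(":", filters));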
private void Execute(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings)
{
    this.Args.Add(args);
    this.Settings.Add(settings);

    Assert.That(args.ReportFile, Is.Not.Null);
    Assert.That(args.ReportFormat, Is.EqualTo(OutputFormat.XML));
    Assert.That(args.LogFile, Is.Not.Null);
    Assert.That(args.LogFormat, Is.EqualTo(OutputFormat.XML));

    string temp = Path.GetDirectoryName(Path.GetTempPath());

    Assert.That(Path.GetDirectoryName(args.ReportFile), Is.EqualTo(temp));
    Assert.That(Path.GetDirectoryName(args.LogFile), Is.EqualTo(temp));

    if (!string.IsNullOrEmpty(args.StandardOutFile))
    {
        Assert.That(Path.GetDirectoryName(args.StandardOutFile), Is.EqualTo(temp));
    }

    if (!string.IsNullOrEmpty(args.StandardErrorFile))
    {
        Assert.That(Path.GetDirectoryName(args.StandardErrorFile), Is.EqualTo(temp));
    }

    // Create empty result files just in case we are running via the source batching strategy
    Copy("BoostTestAdapterNunit.Resources.ReportsLogs.Empty.sample.test.log.xml", args.LogFile);
    Copy("BoostTestAdapterNunit.Resources.ReportsLogs.Empty.sample.test.report.xml", args.ReportFile);

    // Copy the default result files to a temporary location so that they can eventually be read as a TestResultCollection
    foreach (string test in args.Tests)
    {
        TestResources resources = Parent.TestResourceProvider(this.Source, test);
        Copy(resources.ReportFilePath, args.ReportFile);
        Copy(resources.LogFilePath, args.LogFile);
    }
}
public override IEnumerable<TestRun> BatchTests(IEnumerable<VSTestCase> tests)
{
    BoostTestRunnerSettings adaptedSettings = this.Settings.TestRunnerSettings.Clone();
    // Disable timeout since this batching strategy executes more than one test at a time
    adaptedSettings.Timeout = -1;

    // Group by source
    var sources = tests.GroupBy(test => test.Source);
    foreach (var source in sources)
    {
        IBoostTestRunner runner = GetTestRunner(source.Key);
        if (runner == null)
        {
            continue;
        }

        BoostTestRunnerCommandLineArgs args = BuildCommandLineArgs(source.Key);

        // NOTE The '--run_test' command-line argument is left empty so that all tests are executed
        yield return new TestRun(runner, source, args, adaptedSettings);
    }
}
public int Execute(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings, IProcessExecutionContext context)
{
    // Serve a canned '--list_content=DOT' output via standard error, as a real Boost.Test module would
    Copy("BoostTestAdapterNunit.Resources.ListContentDOT.sample.8.list.content.gv", args.StandardErrorFile);
    return 0;
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="runner">The IBoostTestRunner which will be used to run the tests</param>
/// <param name="tests">The Visual Studio test cases which will be executed</param>
/// <param name="args">The command-line arguments for the IBoostTestRunner representing the Visual Studio test cases</param>
/// <param name="settings">Additional settings required for the correct configuration of the test runner</param>
public TestRun(IBoostTestRunner runner, IEnumerable<VSTestCase> tests, BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings)
{
    this.Runner = runner;
    this.Tests = tests;
    this.Arguments = args;
    this.Settings = settings;
}
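// A minimal usage sketch, assuming 'runner', 'testCases' and 'settings' are supplied by the
// surrounding adapter code (the test name shown is hypothetical):
var args = new BoostTestRunnerCommandLineArgs();
args.Tests.Add("master_suite/my_test");

TestRun run = new TestRun(runner, testCases, args, settings);
// The constructor arguments are exposed via the Runner, Tests, Arguments and Settings properties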
public void DiscoverTests(IEnumerable<string> sources, IDiscoveryContext discoveryContext, ITestCaseDiscoverySink discoverySink)
{
    Code.Require(sources, "sources");
    Code.Require(discoverySink, "discoverySink");

    // Populate loop-invariant attributes and settings
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(discoveryContext);

    BoostTestRunnerFactoryOptions options = new BoostTestRunnerFactoryOptions()
    {
        ExternalTestRunnerSettings = settings.ExternalTestRunner
    };

    BoostTestRunnerSettings runnerSettings = new BoostTestRunnerSettings()
    {
        Timeout = settings.DiscoveryTimeoutMilliseconds
    };

    BoostTestRunnerCommandLineArgs args = new BoostTestRunnerCommandLineArgs()
    {
        ListContent = ListContentFormat.DOT
    };

    foreach (var source in sources)
    {
        try
        {
            args.SetWorkingEnvironment(source, settings, ((_vsProvider == null) ? null : _vsProvider.Instance));
        }
        catch (COMException ex)
        {
            Logger.Exception(ex, "Could not retrieve WorkingDirectory from Visual Studio Configuration");
        }

        try
        {
            IBoostTestRunner runner = _factory.GetRunner(source, options);

            using (TemporaryFile output = new TemporaryFile(TestPathGenerator.Generate(source, ".list.content.gv")))
            {
                // '--list_content' output is redirected to standard error
                args.StandardErrorFile = output.Path;
                Logger.Debug("list_content file: {0}", args.StandardErrorFile);

                runner.Run(args, runnerSettings);

                // Parse '--list_content=DOT' output
                using (FileStream stream = File.OpenRead(args.StandardErrorFile))
                {
                    TestFrameworkDOTDeserialiser deserialiser = new TestFrameworkDOTDeserialiser(source);

                    // Pass in a visitor to avoid a 2-pass loop in order to notify test cases to VS
                    //
                    // NOTE Due to deserialisation, make sure that only test cases are visited. Test
                    //      suites may be visited after their child test cases are visited.
                    deserialiser.Deserialise(stream, new VSDiscoveryVisitorTestsOnly(source, discoverySink));
                }
            }
        }
        catch (Exception ex)
        {
            Logger.Exception(ex, "Exception caught while discovering tests for {0} ({1} - {2})", source, ex.Message, ex.HResult);
        }
    }
}
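// A minimal sketch of a discovery sink for exercising DiscoverTests outside of Visual
// Studio, assuming only ITestCaseDiscoverySink's single SendTestCase member from
// Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter:
internal sealed class RecordingDiscoverySink : ITestCaseDiscoverySink
{
    public List<TestCase> Discovered { get; } = new List<TestCase>();

    public void SendTestCase(TestCase discoveredTest)
    {
        // Record each discovered test case instead of forwarding it to Visual Studio
        Discovered.Add(discoveredTest);
    }
}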
public void Run(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings)
{
    Execute(args, settings);
}
public void Debug(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings, IFrameworkHandle framework)
{
    this.DebugExecution = true;
    Execute(args, settings);
}
public void DiscoverTests(IEnumerable<string> sources, IDiscoveryContext discoveryContext, ITestCaseDiscoverySink discoverySink)
{
    Code.Require(sources, "sources");
    Code.Require(discoverySink, "discoverySink");

    // Populate loop-invariant attributes and settings
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(discoveryContext);

    BoostTestRunnerSettings runnerSettings = new BoostTestRunnerSettings()
    {
        Timeout = settings.DiscoveryTimeoutMilliseconds
    };

    BoostTestRunnerCommandLineArgs args = new BoostTestRunnerCommandLineArgs()
    {
        ListContent = ListContentFormat.DOT
    };

    foreach (var source in sources)
    {
        try
        {
            var vs = _vsProvider?.Instance;
            if (vs != null)
            {
                Logger.Debug("Connected to Visual Studio {0} instance", vs.Version);
            }

            args.SetWorkingEnvironment(source, settings, vs);
        }
        catch (ROTException ex)
        {
            Logger.Exception(ex, "Could not retrieve WorkingDirectory from Visual Studio Configuration");
        }
        catch (COMException ex)
        {
            Logger.Exception(ex, "Could not retrieve WorkingDirectory from Visual Studio Configuration");
        }

        try
        {
            IBoostTestRunner runner = _factory.GetRunner(source, settings.TestRunnerFactoryOptions);

            using (TemporaryFile output = new TemporaryFile(TestPathGenerator.Generate(source, ".list.content.gv")))
            {
                // '--list_content' output is redirected to standard error
                args.StandardErrorFile = output.Path;
                Logger.Debug("list_content file: {0}", args.StandardErrorFile);

                int resultCode = EXIT_SUCCESS;

                using (var context = new DefaultProcessExecutionContext())
                {
                    resultCode = runner.Execute(args, runnerSettings, context);
                }

                // Skip sources for which the '--list_content' file is not available
                if (!File.Exists(args.StandardErrorFile))
                {
                    Logger.Error("--list_content=DOT output for {0} is not available. Skipping.", source);
                    continue;
                }

                // If the executable failed to exit with an EXIT_SUCCESS code, skip the source and notify the user accordingly
                if (resultCode != EXIT_SUCCESS)
                {
                    Logger.Error("--list_content=DOT for {0} failed with exit code {1}. Skipping.", source, resultCode);
                    continue;
                }

                // Parse '--list_content=DOT' output
                using (FileStream stream = File.OpenRead(args.StandardErrorFile))
                {
                    TestFrameworkDOTDeserialiser deserialiser = new TestFrameworkDOTDeserialiser(source);
                    TestFramework framework = deserialiser.Deserialise(stream);

                    if ((framework != null) && (framework.MasterTestSuite != null))
                    {
                        framework.MasterTestSuite.Apply(new VSDiscoveryVisitor(source, GetVersion(runner), discoverySink));
                    }
                }
            }
        }
        catch (Exception ex)
        {
            Logger.Exception(ex, "Exception caught while discovering tests for {0} ({1} - {2})", source, ex.Message, ex.HResult);
        }
    }
}
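// A minimal usage sketch, assuming 'discoverer' is an instance of the discoverer above,
// 'discoveryContext' is provided by the test host, and reusing the hypothetical
// RecordingDiscoverySink from the earlier sketch (the executable path is hypothetical):
var sink = new RecordingDiscoverySink();
discoverer.DiscoverTests(new[] { @"C:\tests\sample.test.boostd.exe" }, discoveryContext, sink);

foreach (var test in sink.Discovered)
{
    Console.WriteLine("{0} ({1})", test.FullyQualifiedName, test.Source);
}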
public void Run(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings)
{
    // Serve a canned '--list_content=DOT' output via standard error, as a real Boost.Test module would
    Copy("BoostTestAdapterNunit.Resources.ListContentDOT.sample.8.list.content.gv", args.StandardErrorFile);
}
public void Debug(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings, IFrameworkHandle framework)
{
    throw new NotImplementedException();
}