/// <summary>
/// Runs tests one batch at a time and reports results back to the framework.
/// </summary>
/// <param name="testBatches">List of test batches to run</param>
/// <param name="runContext">Solution properties</param>
/// <param name="frameworkHandle">Unit test framework handle</param>
private void RunBoostTests(IEnumerable<TestRun> testBatches, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(runContext);

    foreach (TestRun batch in testBatches)
    {
        if (_cancelled)
        {
            break;
        }

        DateTimeOffset start = new DateTimeOffset(DateTime.Now);

        try
        {
            Logger.Info("{0}: -> [{1}]", ((runContext.IsBeingDebugged) ? "Debugging" : "Executing"), string.Join(", ", batch.Tests));

            using (TemporaryFile report = new TemporaryFile(batch.Arguments.ReportFile))
            using (TemporaryFile log = new TemporaryFile(batch.Arguments.LogFile))
            using (TemporaryFile stdout = new TemporaryFile(batch.Arguments.StandardOutFile))
            using (TemporaryFile stderr = new TemporaryFile(batch.Arguments.StandardErrorFile))
            {
                Logger.Debug("Working directory: {0}", batch.Arguments.WorkingDirectory ?? "(null)");
                Logger.Debug("Report file      : {0}", batch.Arguments.ReportFile);
                Logger.Debug("Log file         : {0}", batch.Arguments.LogFile);
                Logger.Debug("StdOut file      : {0}", batch.Arguments.StandardOutFile ?? "(null)");
                Logger.Debug("StdErr file      : {0}", batch.Arguments.StandardErrorFile ?? "(null)");

                // Execute the tests
                if (ExecuteTests(batch, runContext, frameworkHandle))
                {
                    foreach (VSTestResult result in GenerateTestResults(batch, start, settings))
                    {
                        // Identify the test result to the Visual Studio Test framework
                        frameworkHandle.RecordResult(result);
                    }
                }
            }
        }
        catch (Boost.Runner.TimeoutException ex)
        {
            foreach (VSTestCase testCase in batch.Tests)
            {
                VSTestResult testResult = GenerateTimeoutResult(testCase, ex);
                testResult.StartTime = start;

                frameworkHandle.RecordResult(testResult);
            }
        }
        catch (Exception ex)
        {
            Logger.Exception(ex, "Exception caught while running test batch {0} [{1}] ({2})", batch.Source, string.Join(", ", batch.Tests), ex.Message);
        }
    }
}
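// For reference, a minimal sketch of what GenerateTimeoutResult could look like. This is an
// assumption inferred from how it is used above, not the adapter's actual implementation:
// a timed-out test case is mapped to a failed VSTestResult whose duration reflects the
// configured timeout threshold.
private static VSTestResult GenerateTimeoutResult(VSTestCase testCase, Boost.Runner.TimeoutException ex)
{
    return new VSTestResult(testCase)
    {
        ComputerName = Environment.MachineName,
        Outcome = TestOutcome.Failed,
        // Assumes the TimeoutException exposes the timeout threshold in milliseconds
        Duration = TimeSpan.FromMilliseconds(ex.Timeout),
        ErrorMessage = "Timeout exceeded. Test execution was aborted."
    };
}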
/// <summary>
/// Execute the tests one by one. Run Selected.
/// </summary>
/// <param name="tests">Collection of test cases to execute</param>
/// <param name="runContext">Solution properties</param>
/// <param name="frameworkHandle">Unit test framework handle</param>
/// <remarks>Entry point of the execution procedure whenever the user requests to run one or a specific list of tests</remarks>
public void RunTests(IEnumerable<VSTestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    Code.Require(tests, "tests");
    Code.Require(runContext, "runContext");
    Code.Require(frameworkHandle, "frameworkHandle");

    SetUp(frameworkHandle);

    Logger.Debug("IRunContext.IsDataCollectionEnabled: {0}", runContext.IsDataCollectionEnabled);
    Logger.Debug("IRunContext.RunSettings.SettingsXml: {0}", runContext.RunSettings.SettingsXml);

    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(runContext);

    // Batch tests into grouped runs based on test source and test suite so that we minimize symbol reloading
    //
    // NOTE Batching at test suite level is required since Boost Unit Test Framework command-line arguments only
    //      allow multiple test names to be specified for tests which reside in the same test suite
    //
    // NOTE For code coverage, speed is given preference over adapter responsiveness.
    TestBatch.Strategy strategy = ((runContext.IsDataCollectionEnabled) ? TestBatch.Strategy.TestSuite : settings.TestBatchStrategy);

    // The Source strategy is invalid in this context since explicit tests are chosen. TestSuite is used instead.
    if (strategy == Strategy.Source)
    {
        strategy = Strategy.TestSuite;
    }

    ITestBatchingStrategy batchStrategy = GetBatchStrategy(strategy, settings, runContext);

    if (batchStrategy == null)
    {
        Logger.Error("No valid test batching strategy was found. Tests skipped.");
    }
    else
    {
        // NOTE Apply Distinct to avoid duplicate test cases, a common issue when using BOOST_DATA_TEST_CASE.
        IEnumerable<TestRun> batches = batchStrategy.BatchTests(tests.Distinct(new TestCaseComparer()));
        RunBoostTests(batches, runContext, frameworkHandle);
    }

    TearDown();
}
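// A minimal sketch of the TestCaseComparer used with Distinct above. This is an assumption,
// not the adapter's confirmed implementation: two test cases are treated as equal when they
// stem from the same source module and share the same fully-qualified name, which is the
// typical duplicate pattern produced by BOOST_DATA_TEST_CASE.
private class TestCaseComparer : IEqualityComparer<VSTestCase>
{
    public bool Equals(VSTestCase left, VSTestCase right)
    {
        if (ReferenceEquals(left, right))
        {
            return true;
        }

        if ((left == null) || (right == null))
        {
            return false;
        }

        return (left.Source == right.Source) && (left.FullyQualifiedName == right.FullyQualifiedName);
    }

    public int GetHashCode(VSTestCase test)
    {
        return (test == null) ? 0 : (test.FullyQualifiedName ?? string.Empty).GetHashCode();
    }
}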
public void DiscoverTests(IEnumerable<string> sources, IDiscoveryContext discoveryContext, ITestCaseDiscoverySink discoverySink)
{
    if (sources == null)
    {
        return;
    }

    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(discoveryContext);

    try
    {
        // Filter out any sources which are not of interest
        if (!TestSourceFilter.IsNullOrEmpty(settings.Filters))
        {
            sources = sources.Where(settings.Filters.ShouldInclude);
        }

        var results = _boostTestDiscovererFactory.GetDiscoverers(sources.ToList(), settings);
        if (results == null)
        {
            return;
        }

        // Test discovery
        foreach (var discoverer in results)
        {
            if (discoverer.Sources.Count > 0)
            {
                Logger.Info("Discovering ({0}): -> [{1}]", discoverer.Discoverer.GetType().Name, string.Join(", ", discoverer.Sources));
                discoverer.Discoverer.DiscoverTests(discoverer.Sources, discoveryContext, discoverySink);
            }
        }
    }
    catch (Exception ex)
    {
        Logger.Exception(ex, "Exception caught while discovering tests: {0} ({1})", ex.Message, ex.HResult);
    }
}
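// A minimal sketch of the source-filtering contract assumed above; illustrative only, not the
// adapter's confirmed implementation. ShouldInclude is presumed to match a source path against
// user-configured include/exclude patterns (the use of regular expressions here is an
// assumption; requires System.Text.RegularExpressions).
public class TestSourceFilter
{
    public List<string> Include { get; set; } = new List<string>();
    public List<string> Exclude { get; set; } = new List<string>();

    public bool ShouldInclude(string source)
    {
        // Exclusions take precedence over inclusions
        if (Exclude.Any(pattern => Regex.IsMatch(source, pattern)))
        {
            return false;
        }

        return (Include.Count == 0) || Include.Any(pattern => Regex.IsMatch(source, pattern));
    }

    public static bool IsNullOrEmpty(TestSourceFilter filter)
    {
        return (filter == null) || ((filter.Include.Count == 0) && (filter.Exclude.Count == 0));
    }
}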
public void DiscoverTests(IEnumerable<string> sources, IDiscoveryContext discoveryContext, ITestCaseDiscoverySink discoverySink)
{
    Code.Require(sources, "sources");
    Code.Require(discoverySink, "discoverySink");

    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(discoveryContext);
    _sourceFilters = SourceFilterFactory.Get(settings);

    IDictionary<string, ProjectInfo> solutioninfo = null;

    // Try several times to overcome the "Application is Busy" COMException
    var numberOfAttempts = 100;

    while (numberOfAttempts > 0)
    {
        try
        {
            solutioninfo = PrepareTestCaseData(sources);

            // Set numberOfAttempts to 0 because there is no need to try again,
            // since obviously no exception was thrown at this point
            numberOfAttempts = 0;
        }
        catch (COMException)
        {
            --numberOfAttempts;

            // Re-throw after all attempts have failed
            if (numberOfAttempts == 0)
            {
                throw;
            }
        }
    }

    GetBoostTests(solutioninfo, discoverySink);
}
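// The retry loop above is effectively a bounded retry policy for transient COM failures.
// A hypothetical refactoring of the same pattern into a reusable helper might look like
// this (the helper name is illustrative, not part of the adapter):
private static T RetryOnComException<T>(Func<T> action, int attempts)
{
    while (true)
    {
        try
        {
            return action();
        }
        catch (COMException)
        {
            // Re-throw once the attempt budget is exhausted
            if (--attempts <= 0)
            {
                throw;
            }
        }
    }
}

// Usage: solutioninfo = RetryOnComException(() => PrepareTestCaseData(sources), 100);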
/// <summary>
/// Execute the tests one by one. Run All.
/// </summary>
/// <param name="sources">Collection of test modules (exe/dll)</param>
/// <param name="runContext">Solution properties</param>
/// <param name="frameworkHandle">Unit test framework handle</param>
/// <remarks>Entry point of the execution procedure whenever the user requests to run all the tests</remarks>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    Code.Require(sources, "sources");
    Code.Require(runContext, "runContext");
    Code.Require(frameworkHandle, "frameworkHandle");

    SetUp(frameworkHandle);

    Logger.Debug("IRunContext.IsDataCollectionEnabled: {0}", runContext.IsDataCollectionEnabled);
    Logger.Debug("IRunContext.RunSettings.SettingsXml: {0}", runContext.RunSettings.SettingsXml);

    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(runContext);

    foreach (string source in sources)
    {
        if (_cancelled)
        {
            break;
        }

        var discoverer = _boostTestDiscovererFactory.GetDiscoverer(source, settings);
        if (discoverer != null)
        {
            try
            {
                DefaultTestCaseDiscoverySink sink = new DefaultTestCaseDiscoverySink();

                // NOTE IRunContext implements IDiscoveryContext
                // NOTE IFrameworkHandle implements IMessageLogger

                // Re-discover tests so that we can make use of the RunTests overload which takes an enumeration of test cases.
                // This is necessary since we need to run tests one by one in order to keep the test adapter responsive
                // and to have a list of tests over which we can generate test results.
                discoverer.DiscoverTests(new[] { source }, runContext, sink);

                // The following ensures that only test cases which are not disabled are run when the user presses "Run All".
                // This, however, can be overridden by the supplied .runsettings file.
                IEnumerable<TestCase> testsToRun = GetTestsToRun(settings, sink.Tests);

                // Batch tests into grouped runs by source so that we avoid reloading symbols per test run.
                // Batching by source since this overload is called when 'Run All...' or equivalent is triggered.
                //
                // NOTE For code coverage, speed is given preference over adapter responsiveness.
                TestBatch.Strategy strategy = ((runContext.IsDataCollectionEnabled) ? TestBatch.Strategy.Source : settings.TestBatchStrategy);

                ITestBatchingStrategy batchStrategy = GetBatchStrategy(strategy, settings, runContext);
                if (batchStrategy == null)
                {
                    Logger.Error("No valid test batching strategy was found for {0}. Source skipped.", source);
                    continue;
                }

                IEnumerable<TestRun> batches = batchStrategy.BatchTests(testsToRun);

                // Delegate to the RunBoostTests overload which takes an enumeration of test batches
                RunBoostTests(batches, runContext, frameworkHandle);
            }
            catch (Exception ex)
            {
                Logger.Error("Exception caught while running tests from {0} ({1})", source, ex.Message);
            }
        }
    }

    TearDown();
}
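// A minimal sketch of what GetTestsToRun could look like; this is an assumption based on the
// comment above (filter out disabled tests unless the .runsettings configuration overrides it).
// The RunDisabledTests setting and the "Enabled" trait name are illustrative guesses, not the
// adapter's confirmed contract.
private static IEnumerable<TestCase> GetTestsToRun(BoostTestAdapterSettings settings, IEnumerable<TestCase> tests)
{
    // Assumes a .runsettings-driven override which forces disabled tests to run
    if (settings.RunDisabledTests)
    {
        return tests;
    }

    // Keep only tests which are not explicitly marked as disabled via a trait
    return tests.Where(test => !test.Traits.Any(trait => (trait.Name == "Enabled") && (trait.Value == "false")));
}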
public void DiscoverTests(IEnumerable<string> sources, IDiscoveryContext discoveryContext, ITestCaseDiscoverySink discoverySink)
{
    Code.Require(sources, "sources");
    Code.Require(discoverySink, "discoverySink");

    // Populate loop-invariant attributes and settings
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(discoveryContext);

    BoostTestRunnerFactoryOptions options = new BoostTestRunnerFactoryOptions()
    {
        ExternalTestRunnerSettings = settings.ExternalTestRunner
    };

    BoostTestRunnerSettings runnerSettings = new BoostTestRunnerSettings()
    {
        Timeout = settings.DiscoveryTimeoutMilliseconds
    };

    BoostTestRunnerCommandLineArgs args = new BoostTestRunnerCommandLineArgs()
    {
        ListContent = ListContentFormat.DOT
    };

    foreach (var source in sources)
    {
        try
        {
            args.SetWorkingEnvironment(source, settings, ((_vsProvider == null) ? null : _vsProvider.Instance));
        }
        catch (COMException ex)
        {
            Logger.Exception(ex, "Could not retrieve WorkingDirectory from Visual Studio Configuration");
        }

        try
        {
            IBoostTestRunner runner = _factory.GetRunner(source, options);

            using (TemporaryFile output = new TemporaryFile(TestPathGenerator.Generate(source, ".list.content.gv")))
            {
                // --list_content output is redirected to standard error
                args.StandardErrorFile = output.Path;
                Logger.Debug("list_content file: {0}", args.StandardErrorFile);

                runner.Run(args, runnerSettings);

                // Parse --list_content=DOT output
                using (FileStream stream = File.OpenRead(args.StandardErrorFile))
                {
                    TestFrameworkDOTDeserialiser deserialiser = new TestFrameworkDOTDeserialiser(source);

                    // Pass in a visitor to avoid a 2-pass loop in order to notify test cases to VS
                    //
                    // NOTE Due to deserialisation, make sure that only test cases are visited. Test
                    //      suites may be visited after their child test cases are visited.
                    deserialiser.Deserialise(stream, new VSDiscoveryVisitorTestsOnly(source, discoverySink));
                }
            }
        }
        catch (Exception ex)
        {
            Logger.Exception(ex, "Exception caught while discovering tests for {0} ({1} - {2})", source, ex.Message, ex.HResult);
        }
    }
}
public void DiscoverTests(IEnumerable<string> sources, IDiscoveryContext discoveryContext, ITestCaseDiscoverySink discoverySink)
{
    Code.Require(sources, "sources");
    Code.Require(discoverySink, "discoverySink");

    // Populate loop-invariant attributes and settings
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(discoveryContext);

    BoostTestRunnerSettings runnerSettings = new BoostTestRunnerSettings()
    {
        Timeout = settings.DiscoveryTimeoutMilliseconds
    };

    BoostTestRunnerCommandLineArgs args = new BoostTestRunnerCommandLineArgs()
    {
        ListContent = ListContentFormat.DOT
    };

    foreach (var source in sources)
    {
        try
        {
            var vs = _vsProvider?.Instance;
            if (vs != null)
            {
                Logger.Debug("Connected to Visual Studio {0} instance", vs.Version);
            }

            args.SetWorkingEnvironment(source, settings, vs);
        }
        catch (ROTException ex)
        {
            Logger.Exception(ex, "Could not retrieve WorkingDirectory from Visual Studio Configuration");
        }
        catch (COMException ex)
        {
            Logger.Exception(ex, "Could not retrieve WorkingDirectory from Visual Studio Configuration");
        }

        try
        {
            IBoostTestRunner runner = _factory.GetRunner(source, settings.TestRunnerFactoryOptions);

            using (TemporaryFile output = new TemporaryFile(TestPathGenerator.Generate(source, ".list.content.gv")))
            {
                // --list_content output is redirected to standard error
                args.StandardErrorFile = output.Path;
                Logger.Debug("list_content file: {0}", args.StandardErrorFile);

                int resultCode = EXIT_SUCCESS;

                using (var context = new DefaultProcessExecutionContext())
                {
                    resultCode = runner.Execute(args, runnerSettings, context);
                }

                // Skip sources for which the --list_content file is not available
                if (!File.Exists(args.StandardErrorFile))
                {
                    Logger.Error("--list_content=DOT output for {0} is not available. Skipping.", source);
                    continue;
                }

                // If the executable failed to exit with an EXIT_SUCCESS code, skip the source and notify the user accordingly
                if (resultCode != EXIT_SUCCESS)
                {
                    Logger.Error("--list_content=DOT for {0} failed with exit code {1}. Skipping.", source, resultCode);
                    continue;
                }

                // Parse --list_content=DOT output
                using (FileStream stream = File.OpenRead(args.StandardErrorFile))
                {
                    TestFrameworkDOTDeserialiser deserialiser = new TestFrameworkDOTDeserialiser(source);
                    TestFramework framework = deserialiser.Deserialise(stream);

                    if ((framework != null) && (framework.MasterTestSuite != null))
                    {
                        framework.MasterTestSuite.Apply(new VSDiscoveryVisitor(source, GetVersion(runner), discoverySink));
                    }
                }
            }
        }
        catch (Exception ex)
        {
            Logger.Exception(ex, "Exception caught while discovering tests for {0} ({1} - {2})", source, ex.Message, ex.HResult);
        }
    }
}
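// For context, the adapter is effectively running the equivalent of the following Boost.Test
// command line for each source, with standard error redirected to the temporary .gv file:
//
//   test_module.exe --list_content=DOT 2> test_module.exe.list.content.gv
//
// EXIT_SUCCESS is assumed to be the conventional process exit code constant:
private const int EXIT_SUCCESS = 0;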