/// <summary>
/// Verifies that Clone() produces an instance whose every property matches the original.
/// </summary>
public void CloneCommandLineArgs()
{
    BoostTestRunnerCommandLineArgs original = GenerateCommandLineArgs();
    BoostTestRunnerCommandLineArgs copy = original.Clone();

    // Test selection and execution environment
    Assert.That(original.Tests, Is.EqualTo(copy.Tests));
    Assert.That(original.WorkingDirectory, Is.EqualTo(copy.WorkingDirectory));

    // Log configuration
    Assert.That(original.LogFile, Is.EqualTo(copy.LogFile));
    Assert.That(original.LogFormat, Is.EqualTo(copy.LogFormat));
    Assert.That(original.LogLevel, Is.EqualTo(copy.LogLevel));

    // Report configuration
    Assert.That(original.ReportFile, Is.EqualTo(copy.ReportFile));
    Assert.That(original.ReportFormat, Is.EqualTo(copy.ReportFormat));
    Assert.That(original.ReportLevel, Is.EqualTo(copy.ReportLevel));

    // Runtime behaviour flags
    Assert.That(original.DetectMemoryLeaks, Is.EqualTo(copy.DetectMemoryLeaks));
    Assert.That(original.StandardErrorFile, Is.EqualTo(copy.StandardErrorFile));
    Assert.That(original.StandardOutFile, Is.EqualTo(copy.StandardOutFile));
    Assert.That(original.ShowProgress, Is.EqualTo(copy.ShowProgress));
    Assert.That(original.BuildInfo, Is.EqualTo(copy.BuildInfo));
    Assert.That(original.AutoStartDebug, Is.EqualTo(copy.AutoStartDebug));
    Assert.That(original.CatchSystemErrors, Is.EqualTo(copy.CatchSystemErrors));
    Assert.That(original.BreakExecPath, Is.EqualTo(copy.BreakExecPath));
    Assert.That(original.ColorOutput, Is.EqualTo(copy.ColorOutput));
    Assert.That(original.ResultCode, Is.EqualTo(copy.ResultCode));
    Assert.That(original.Random, Is.EqualTo(copy.Random));
    Assert.That(original.UseAltStack, Is.EqualTo(copy.UseAltStack));
    Assert.That(original.DetectFPExceptions, Is.EqualTo(copy.DetectFPExceptions));
    Assert.That(original.SavePattern, Is.EqualTo(copy.SavePattern));
    Assert.That(original.ListContent, Is.EqualTo(copy.ListContent));

    // The serialized command line must also be identical
    Assert.That(original.ToString(), Is.EqualTo(copy.ToString()));
}
/// <summary>
/// Generates a command line args instance with pre-determined values.
/// </summary>
/// <returns>A new BoostTestRunnerCommandLineArgs instance populated with pre-determined values.</returns>
private static BoostTestRunnerCommandLineArgs GenerateCommandLineArgs()
{
    var args = new BoostTestRunnerCommandLineArgs
    {
        LogFormat = OutputFormat.XML,
        LogLevel = LogLevel.TestSuite,
        LogFile = GenerateFullyQualifiedPath("log.xml"),

        ReportFormat = OutputFormat.XML,
        ReportLevel = ReportLevel.Detailed,
        ReportFile = GenerateFullyQualifiedPath("report.xml"),

        DetectMemoryLeaks = 0,
        CatchSystemErrors = false,
        DetectFPExceptions = true,

        StandardOutFile = GenerateFullyQualifiedPath("stdout.log"),
        StandardErrorFile = GenerateFullyQualifiedPath("stderr.log")
    };

    // Test selection: one fully-named test plus a wildcard suite
    args.Tests.Add("test");
    args.Tests.Add("suite/*");

    return args;
}
/// <summary>
/// Sets the working environment (i.e. WorkingDirectory and Environment) properties of the command line arguments
/// based on the provided details
/// </summary>
/// <param name="args">The arguments which to set</param>
/// <param name="source">The base source which will be executed</param>
/// <param name="settings">The BoostTestAdapterSettings which are currently applied</param>
/// <param name="vs">The current Visual Studio instance (if available)</param>
/// <exception cref="COMException"></exception>
public static void SetWorkingEnvironment(this BoostTestRunnerCommandLineArgs args, string source, BoostTestAdapterSettings settings, IVisualStudio vs)
{
    Code.Require(args, "args");
    Code.Require(source, "source");
    Code.Require(settings, "settings");

    // Lowest priority: default to the directory containing the test module
    args.WorkingDirectory = Path.GetDirectoryName(source);

    // Next: working directory from the test settings, when it exists on disk
    bool useSettingsDirectory = !string.IsNullOrEmpty(settings.WorkingDirectory) && Directory.Exists(settings.WorkingDirectory);
    if (useSettingsDirectory)
    {
        args.WorkingDirectory = settings.WorkingDirectory;
    }

    // Highest priority: Visual Studio debug configuration (if available)
    if (vs != null)
    {
        IVSDebugConfiguration configuration = LocateVSDebugConfiguration(source, vs);
        if (configuration != null)
        {
            args.WorkingDirectory = configuration.WorkingDirectory;
            args.SetEnvironment(configuration.Environment);
        }
    }

    // Enforce windows style backward slashes
    args.WorkingDirectory = args.WorkingDirectory.Replace('/', '\\');
}
/// <summary>
/// Batches the provided tests into one test run per (source, test suite) pair.
/// </summary>
/// <param name="tests">The Visual Studio test cases to batch</param>
/// <returns>A lazily-evaluated sequence of test runs</returns>
public override IEnumerable<TestRun> BatchTests(IEnumerable<VSTestCase> tests)
{
    BoostTestRunnerSettings adaptedSettings = this.Settings.TestRunnerSettings.Clone();
    // Multiple tests execute per run, so a per-test timeout is not applicable
    adaptedSettings.Timeout = -1;

    // Group by source
    foreach (IGrouping<string, VSTestCase> source in tests.GroupBy(test => test.Source))
    {
        IBoostTestRunner runner = GetTestRunner(source.Key);
        if (runner == null)
        {
            continue;
        }

        // Group by test suite
        var suiteGroups = source.GroupBy(test => test.Traits.First(trait => (trait.Name == VSTestModel.TestSuiteTrait)).Value);
        foreach (var suiteGroup in suiteGroups)
        {
            BoostTestRunnerCommandLineArgs args = BuildCommandLineArgs(source.Key);

            foreach (VSTestCase test in suiteGroup)
            {
                // List all tests by display name, but ensure that the first test is fully
                // qualified so that remaining tests are taken relative to this test suite
                bool isFirstEntry = (args.Tests.Count == 0);
                args.Tests.Add(isFirstEntry ? test.FullyQualifiedName : test.DisplayName);
            }

            yield return new TestRun(runner, suiteGroup, args, adaptedSettings);
        }
    }
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="runner">The IBoostTestRunner which will be used to run the tests</param>
/// <param name="tests">The Visual Studio test cases which will be executed</param>
/// <param name="args">The command-line arguments for the IBoostTestRunner representing the Visual Studio test cases</param>
/// <param name="settings">Additional settings required for correct configuration of the test runner</param>
public TestRun(IBoostTestRunner runner, IEnumerable<VSTestCase> tests, BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings)
{
    Runner = runner;
    Tests = tests;
    Arguments = args;
    Settings = settings;
}
/// <summary>
/// Parses the Xml report and log file as specified within the provided
/// BoostTestRunnerCommandLineArgs instance.
/// </summary>
/// <param name="args">The BoostTestRunnerCommandLineArgs which specify the report and log file.</param>
/// <param name="settings">The BoostTestAdapterSettings which specify adapter specific settings.</param>
public void Parse(BoostTestRunnerCommandLineArgs args, BoostTestAdapterSettings settings)
{
    // Build the parser collection incrementally INSIDE the try block so that
    // parsers which were already constructed are still disposed if a later
    // factory call throws. (An all-at-once array initializer would leak the
    // earlier parsers, since the array is never assigned when an element
    // initializer throws.)
    List<IBoostTestResultOutput> parsers = new List<IBoostTestResultOutput>();

    try
    {
        parsers.Add(GetReportParser(args));
        parsers.Add(GetLogParser(args));
        parsers.Add(GetStandardOutput(args, settings));
        parsers.Add(GetStandardError(args, settings));

        Parse(parsers);
    }
    finally
    {
        // Factory methods may return null when the respective output is unavailable
        foreach (IBoostTestResultOutput parser in parsers)
        {
            if (parser != null)
            {
                parser.Dispose();
            }
        }
    }
}
/// <summary>
/// Identify the version (if possible) of the Boost.Test module
/// </summary>
/// <param name="runner">The Boost.Test module</param>
/// <returns>The Boost version of the Boost.Test module or the empty string if the version cannot be retrieved</returns>
private static string GetVersion(IBoostTestRunner runner)
{
    if (!runner.VersionSupported)
    {
        return string.Empty;
    }

    // '--version' emits to standard error; capture it in a temporary file
    using (TemporaryFile stdErr = new TemporaryFile(TestPathGenerator.Generate(runner.Source, ".version.stderr.log")))
    {
        var args = new BoostTestRunnerCommandLineArgs
        {
            Version = true,
            StandardErrorFile = stdErr.Path
        };

        int exitCode;
        using (var context = new DefaultProcessExecutionContext())
        {
            exitCode = runner.Execute(args, new BoostTestRunnerSettings(), context);
        }

        if (exitCode != EXIT_SUCCESS)
        {
            Logger.Error("--version for {0} failed with exit code {1}. Skipping.", runner.Source, exitCode);
            return string.Empty;
        }

        // Extract the version token from the captured output
        string capturedOutput = File.ReadAllText(args.StandardErrorFile, System.Text.Encoding.ASCII);
        Match match = _versionPattern.Match(capturedOutput);
        return match.Success ? match.Groups[1].Value : string.Empty;
    }
}
/// <summary>
/// Verifies that a Boost.Test module advertising '--list_content' support is discovered
/// via the DOT list-content output, and that the temporary output file is cleaned up.
/// </summary>
public void ListContentSupport()
{
    IBoostTestRunner runner = A.Fake<IBoostTestRunner>();
    string output = null;

    // Fake a runner which supports '--list_content' but not '--version'
    A.CallTo(() => runner.Capabilities).Returns(new BoostTestRunnerCapabilities { ListContent = true, Version = false });

    // When executed with '--list_content=DOT', emit a canned DOT listing to the
    // requested standard-error destination and remember where it was written
    A.CallTo(() => runner.Execute(A<BoostTestRunnerCommandLineArgs>._, A<BoostTestRunnerSettings>._, A<IProcessExecutionContext>._)).Invokes((call) =>
    {
        BoostTestRunnerCommandLineArgs args = (BoostTestRunnerCommandLineArgs)call.Arguments.First();
        if ((args.ListContent.HasValue) && (args.ListContent.Value == ListContentFormat.DOT))
        {
            output = TestHelper.CopyEmbeddedResourceToDirectory("BoostTestAdapterNunit.Resources.ListContentDOT.sample.8.list.content.gv", args.StandardErrorFile);
        }
    }).Returns(0);

    FakeBoostTestRunnerFactory factory = new FakeBoostTestRunnerFactory(runner);
    ListContentDiscoverer discoverer = new ListContentDiscoverer(factory, DummyVSProvider.Default);

    DefaultTestContext context = new DefaultTestContext();
    DefaultTestCaseDiscoverySink sink = new DefaultTestCaseDiscoverySink();

    discoverer.DiscoverTests(new[] { "a.exe", }, context, sink);

    // Ensure proper test runner execution
    Assert.That(factory.ProvisionedRunners.Count, Is.EqualTo(1));
    foreach (IBoostTestRunner provisioned in factory.ProvisionedRunners.Select(provision => provision.Item3))
    {
        // Every provisioned runner must have been invoked with '--list_content=DOT'
        // and a non-empty standard-error redirection target
        A.CallTo(() => provisioned.Execute(A<BoostTestRunnerCommandLineArgs>._, A<BoostTestRunnerSettings>._, A<IProcessExecutionContext>._)).
            WhenArgumentsMatch((arguments) =>
            {
                BoostTestRunnerCommandLineArgs args = (BoostTestRunnerCommandLineArgs)arguments.First();
                return ((args.ListContent.HasValue) && (args.ListContent.Value == ListContentFormat.DOT) && (!string.IsNullOrEmpty(args.StandardErrorFile)));
            }).
            MustHaveHappened();
    }

    // Ensure proper test discovery (the sample DOT listing contains 8 tests)
    Assert.That(sink.Tests.Count, Is.EqualTo(8));

    AssertLabelTrait(sink.Tests.FirstOrDefault((vstest) => (vstest.FullyQualifiedName == "test_2")), "l1");
    AssertLabelTrait(sink.Tests.FirstOrDefault((vstest) => (vstest.FullyQualifiedName == "test_6")), "l1");

    // test_8 carries multiple labels, including one containing a space
    var test_8 = sink.Tests.FirstOrDefault((vstest) => (vstest.FullyQualifiedName == "test_8"));
    AssertLabelTrait(test_8, "l1");
    AssertLabelTrait(test_8, "l2");
    AssertLabelTrait(test_8, "l3 withaspace");

    Assert.That(output, Is.Not.Null);

    // Ensure proper environment cleanup (the temporary list-content file must be deleted)
    Assert.That(File.Exists(output), Is.False);
}
/// <summary>
/// Verifies that standard stream sinks serialize as 'stderr'/'stdout' tokens.
/// </summary>
public void StdOutStdErrSink()
{
    var args = new BoostTestRunnerCommandLineArgs
    {
        Log = Sink.StandardError,
        Report = Sink.StandardOutput
    };

    Assert.That(args.ToString(), Is.EqualTo("\"--log_sink=stderr\" \"--report_sink=stdout\""));
}
/// <summary>
/// Batches tests per source. When every test in the source supports test run filters
/// (i.e. its Boost version meets the minimum), a single run is produced using the
/// BOOST_TEST_RUN_FILTERS environment variable; otherwise falls back to per-suite batching.
/// </summary>
/// <param name="tests">The Visual Studio test cases to batch</param>
/// <returns>A lazily-evaluated sequence of test runs</returns>
public override IEnumerable<TestRun> BatchTests(IEnumerable<VSTestCase> tests)
{
    BoostTestRunnerSettings adaptedSettings = this.Settings.TestRunnerSettings.Clone();
    // Disable timeout since this batching strategy executes more than one test at a time
    adaptedSettings.Timeout = -1;

    // Group by source
    IEnumerable<IGrouping<string, VSTestCase>> sources = tests.GroupBy(test => test.Source);
    foreach (var source in sources)
    {
        IBoostTestRunner runner = GetTestRunner(source.Key);
        if (runner == null)
        {
            continue;
        }

        // Start by batching tests by TestSuite
        var batch = _fallBackStrategy.BatchTests(source);

        // If the Boost.Test module supports test run filters...
        // NOTE(review): GetVersion is evaluated per test case here — presumably
        //               inexpensive/cached; verify if sources contain many tests
        if (source.Select(GetVersion).All(version => (version >= _minimumVersion)))
        {
            BoostTestRunnerCommandLineArgs args = BuildCommandLineArgs(source.Key);

            // Generate the filter set: one enable-filter per suite batch
            var filterSet = new List<TestFilter>();

            foreach (var run in batch)
            {
                TestFilter filter = TestFilter.EnableFilter();

                // Use the command-line representation of the test suite batch to allow
                // for the most compact representation (i.e. fully/qualified/test_name_0,test_name_1,test_name_2)
                filter.TestSet = new PathTestSet(run.Arguments.Tests);

                filterSet.Add(filter);
            }

            // Use the environment variable rather than the command-line '--run_test' to make proper use of test run filters
            args.Environment["BOOST_TEST_RUN_FILTERS"] = string.Join(":", filterSet);

            yield return (new TestRun(runner, source, args, adaptedSettings));
        }
        // Else fall-back to regular test suite batching behaviour...
        else
        {
            foreach (var run in batch)
            {
                yield return (run);
            }
        }
    }
}
/// <summary>
/// Parses the Xml report and log file as specified within the provided
/// BoostTestRunnerCommandLineArgs instance.
/// </summary>
/// <param name="args">The BoostTestRunnerCommandLineArgs which specify the report and log file.</param>
/// <param name="settings">The BoostTestAdapterSettings which specify adapter specific settings.</param>
/// <returns>The parsed test results, indexed by fully-qualified test name.</returns>
public static IDictionary<string, TestResult> Parse(BoostTestRunnerCommandLineArgs args, BoostTestAdapterSettings settings)
{
    var testResults = new Dictionary<string, TestResult>();

    // Each parser contributes its findings into the shared result dictionary
    Parse(GetReportParser(args, testResults));
    Parse(GetLogParser(args, testResults));
    Parse(GetStandardOutputParser(args, settings, testResults));
    Parse(GetStandardErrorParser(args, settings, testResults));

    return testResults;
}
/// <summary>
/// Verifies the full serialized command line of the canned argument sample.
/// </summary>
public void SampleCommandLineArgs()
{
    BoostTestRunnerCommandLineArgs args = GenerateCommandLineArgs();

    // serge: boost 1.60 requires uppercase input
    string expected =
        "\"--run_test=test,suite/*\"" +
        " \"--catch_system_errors=no\"" +
        " \"--log_format=XML\"" +
        " \"--log_level=test_suite\"" +
        " \"--log_sink=" + GenerateFullyQualifiedPath("log.xml") + "\"" +
        " \"--report_format=XML\"" +
        " \"--report_level=detailed\"" +
        " \"--report_sink=" + GenerateFullyQualifiedPath("report.xml") + "\"" +
        " \"--detect_memory_leak=0\"" +
        " \"--detect_fp_exceptions=yes\"" +
        " > \"" + GenerateFullyQualifiedPath("stdout.log") + "\"" +
        " 2> \"" + GenerateFullyQualifiedPath("stderr.log") + "\"";

    Assert.That(args.ToString(), Is.EqualTo(expected));
}
/// <summary>
/// Verifies that a freshly-constructed argument instance pre-populates the
/// "BUTA" environment variable with the value "1".
/// </summary>
public void BUTAEnviormentVariablePresent()
{
    var args = new BoostTestRunnerCommandLineArgs();

    var expectedEnvironment = new Dictionary<string, string>
    {
        { "BUTA", "1" }
    };

    CollectionAssert.AreEqual(expectedEnvironment, args.Environment);
}
/// <summary>
/// Provides a standard error parser when a standard error file was requested and exists on disk.
/// </summary>
/// <param name="args">The command line args which were used to generate the test results</param>
/// <param name="settings">The run time settings which were used to generate the test results</param>
/// <returns>A BoostStandardError parser or null if no standard error output is available</returns>
private static IBoostTestResultOutput GetStandardError(BoostTestRunnerCommandLineArgs args, BoostTestAdapterSettings settings)
{
    string stdErrPath = args.StandardErrorFile;

    // No redirection was requested or the file was never produced
    if (string.IsNullOrEmpty(stdErrPath) || !File.Exists(stdErrPath))
    {
        return null;
    }

    return new BoostStandardError(stdErrPath)
    {
        FailTestOnMemoryLeak = settings.FailTestOnMemoryLeak
    };
}
/// <summary>
/// Factory function which returns an appropriate BoostTestRunnerCommandLineArgs structure for batched test runs
/// </summary>
/// <param name="source">The TestCases source</param>
/// <param name="settings">The Boost Test adapter settings currently in use</param>
/// <returns>A BoostTestRunnerCommandLineArgs structure for the provided source</returns>
private BoostTestRunnerCommandLineArgs GetBatchedTestRunsArguments(string source, BoostTestAdapterSettings settings)
{
    BoostTestRunnerCommandLineArgs batchArgs = GetDefaultArguments(source, settings);

    // Batched runs do not capture standard output/standard error...
    batchArgs.StandardOutFile = null;
    batchArgs.StandardErrorFile = null;

    // ...and do not perform memory leak detection
    batchArgs.DetectMemoryLeaks = 0;

    return batchArgs;
}
/// <summary>
/// Factory method which provides the report IBoostTestResultOutput based on the provided BoostTestRunnerCommandLineArgs
/// </summary>
/// <param name="args">The command line args which were used to generate the test results</param>
/// <returns>An IBoostTestResultOutput or null if one cannot be identified from the provided arguments</returns>
private static IBoostTestResultOutput GetReportParser(BoostTestRunnerCommandLineArgs args)
{
    // Only a non-empty, Xml-formatted report destination can be parsed
    bool parsable = !string.IsNullOrEmpty(args.ReportFile) && (args.ReportFormat == OutputFormat.XML);
    return parsable ? new BoostXmlReport(args.ReportFile) : null;
}
/// <summary>
/// Factory method which provides the log IBoostTestResultOutput based on the provided BoostTestRunnerCommandLineArgs
/// </summary>
/// <param name="args">The command line args which were used to generate the test results</param>
/// <returns>An IBoostTestResultOutput or null if one cannot be identified from the provided arguments</returns>
private static IBoostTestResultOutput GetLogParser(BoostTestRunnerCommandLineArgs args)
{
    // Only a non-empty, Xml-formatted log destination can be parsed
    bool parsable = !string.IsNullOrEmpty(args.LogFile) && (args.LogFormat == OutputFormat.XML);
    return parsable ? new BoostXmlLog(args.LogFile) : null;
}
/// <summary>
/// Verifies how the log file path interacts with the working directory.
/// </summary>
public void FilePaths()
{
    var args = new BoostTestRunnerCommandLineArgs();

    // Relative path with no working directory set
    args.LogFile = "log.xml";
    Assert.That(args.LogFile, Is.EqualTo("log.xml"));
    Assert.That(args.ToString(), Is.EqualTo("\"--log_sink=log.xml\""));

    // Relative path is rooted against the working directory once one is set
    args.WorkingDirectory = @"C:\";
    Assert.That(args.LogFile, Is.EqualTo(@"C:\log.xml"));
    Assert.That(args.ToString(), Is.EqualTo("\"--log_sink=C:\\log.xml\""));

    // An absolute log file path is reported as-is
    args.LogFile = @"D:\Temp\log.xml";
    Assert.That(args.LogFile, Is.EqualTo(@"D:\Temp\log.xml"));
    Assert.That(args.ToString(), Is.EqualTo("\"--log_sink=D:\\Temp\\log.xml\""));
}
/// <summary>
/// Allows specification of an environment via a line separated string
/// </summary>
/// <param name="args">The arguments to populate</param>
/// <param name="environment">The line separated environment string (e.g. "KEY=value\r\nOTHER=value")</param>
public static void SetEnvironment(this BoostTestRunnerCommandLineArgs args, string environment)
{
    Code.Require(args, "args");

    if (string.IsNullOrEmpty(environment))
    {
        return;
    }

    foreach (string entry in environment.Split(new[] { "\r\n", "\n" }, StringSplitOptions.RemoveEmptyEntries))
    {
        // Split into at most 2 parts so that values may themselves contain '='.
        // NOTE: 'String.Split' never returns null (the previous null check was dead code);
        //       malformed entries lacking a key or value (e.g. "KEY=" or "=value") yield
        //       fewer than 2 parts under RemoveEmptyEntries and are silently skipped.
        string[] pair = entry.Split(new[] { '=' }, 2, StringSplitOptions.RemoveEmptyEntries);

        if (pair.Length == 2)
        {
            args.Environment[pair[0]] = pair[1];
        }
    }
}
/// <summary>
/// Factory method which provides the standard error IBoostTestResultOutput based on the provided BoostTestRunnerCommandLineArgs and BoostTestAdapterSettings
/// </summary>
/// <param name="args">The command line args which were used to generate the test results</param>
/// <param name="settings">The run time settings which were used to generate the test results</param>
/// <param name="results">The test result container indexed by test fully qualified name</param>
/// <returns>An IBoostTestResultParser/Source pair or null if one cannot be identified from the provided arguments</returns>
private static ParserFactoryResult GetStandardErrorParser(BoostTestRunnerCommandLineArgs args, BoostTestAdapterSettings settings, IDictionary<string, TestResult> results)
{
    string stdErrPath = args.StandardErrorFile;
    if (string.IsNullOrEmpty(stdErrPath))
    {
        return null;
    }

    // Memory-leak failure behaviour follows the adapter settings when available
    bool failOnMemoryLeak = (settings != null) && settings.FailTestOnMemoryLeak;

    return new ParserFactoryResult()
    {
        Parser = new BoostStandardError(results) { FailTestOnMemoryLeak = failOnMemoryLeak },
        SourceFilePath = stdErrPath
    };
}
/// <summary>
/// Verifies that '--list_content' serialization only emits the list-content flag
/// and the output redirection commands, ignoring report configuration.
/// </summary>
public void ListContentCommandLineArgs()
{
    var args = new BoostTestRunnerCommandLineArgs
    {
        ListContent = ListContentFormat.DOT,
        StandardOutFile = @"C:\Temp\list_content.dot.out",
        StandardErrorFile = @"C:\Temp\list_content.dot.err"
    };

    const string expected = "\"--list_content=DOT\" > \"C:\\Temp\\list_content.dot.out\" 2> \"C:\\Temp\\list_content.dot.err\"";
    Assert.That(args.ToString(), Is.EqualTo(expected));

    // list content only includes the --list_content and the output redirection commands
    args.ReportFormat = OutputFormat.XML;
    args.ReportFile = @"C:\Temp\list_content.report.xml";
    Assert.That(args.ToString(), Is.EqualTo(expected));
}
/// <summary>
/// Factory function which returns an appropriate BoostTestRunnerCommandLineArgs structure
/// </summary>
/// <param name="source">The TestCases source</param>
/// <param name="settings">The Boost Test adapter settings currently in use</param>
/// <returns>A BoostTestRunnerCommandLineArgs structure for the provided source</returns>
private BoostTestRunnerCommandLineArgs GetDefaultArguments(string source, BoostTestAdapterSettings settings)
{
    // Start from the user-configured command-line defaults
    BoostTestRunnerCommandLineArgs defaults = settings.CommandLineArgs.Clone();
    GetDebugConfigurationProperties(source, settings, defaults);

    // Log file configuration
    defaults.LogFormat = OutputFormat.XML;
    defaults.LogLevel = settings.LogLevel;
    defaults.LogFile = TestPathGenerator.Generate(source, FileExtensions.LogFile);

    // Report file configuration
    defaults.ReportFormat = OutputFormat.XML;
    defaults.ReportLevel = ReportLevel.Detailed;
    defaults.ReportFile = TestPathGenerator.Generate(source, FileExtensions.ReportFile);

    // Standard stream redirection is opt-in via settings
    defaults.StandardOutFile = settings.EnableStdOutRedirection ? TestPathGenerator.Generate(source, FileExtensions.StdOutFile) : null;
    defaults.StandardErrorFile = settings.EnableStdErrRedirection ? TestPathGenerator.Generate(source, FileExtensions.StdErrFile) : null;

    return defaults;
}
/// <summary>
/// Factory method which provides the log IBoostTestResultOutput based on the provided BoostTestRunnerCommandLineArgs
/// </summary>
/// <param name="args">The command line args which were used to generate the test results</param>
/// <param name="results">The test result container indexed by test fully qualified name</param>
/// <returns>An IBoostTestResultParser/Source pair or null if one cannot be identified from the provided arguments</returns>
private static ParserFactoryResult GetLogParser(BoostTestRunnerCommandLineArgs args, IDictionary<string, TestResult> results)
{
    // Only a non-empty, Xml-formatted log destination can be parsed
    bool parsable = (args.LogFormat == OutputFormat.XML) && !string.IsNullOrEmpty(args.LogFile);

    return parsable
        ? new ParserFactoryResult() { Parser = new BoostXmlLog(results), SourceFilePath = args.LogFile }
        : null;
}
/// <summary>
/// Factory method which provides the report IBoostTestResultOutput based on the provided BoostTestRunnerCommandLineArgs
/// </summary>
/// <param name="args">The command line args which were used to generate the test results</param>
/// <param name="results">The test result container indexed by test fully qualified name</param>
/// <returns>An IBoostTestResultParser/Source pair or null if one cannot be identified from the provided arguments</returns>
private static ParserFactoryResult GetReportParser(BoostTestRunnerCommandLineArgs args, IDictionary<string, TestResult> results)
{
    // Only a non-empty, Xml-formatted report destination can be parsed
    bool parsable = (args.ReportFormat == OutputFormat.XML) && !string.IsNullOrEmpty(args.ReportFile);

    return parsable
        ? new ParserFactoryResult() { Parser = new BoostXmlReport(results), SourceFilePath = args.ReportFile }
        : null;
}
/// <summary>
/// Fake test runner execution: records the invocation, validates the requested
/// output configuration, and materializes canned result files for later parsing.
/// </summary>
/// <param name="args">The command-line arguments the runner was invoked with</param>
/// <param name="settings">The runner settings the runner was invoked with</param>
private void Execute(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings)
{
    // Record the invocation for later inspection
    this.Args.Add(args);
    this.Settings.Add(settings);

    // Both report and log must be requested in Xml format
    Assert.That(args.ReportFile, Is.Not.Null);
    Assert.That(args.ReportFormat, Is.EqualTo(OutputFormat.XML));
    Assert.That(args.LogFile, Is.Not.Null);
    Assert.That(args.LogFormat, Is.EqualTo(OutputFormat.XML));

    // All output files are expected to reside in the temporary directory
    string tempDirectory = Path.GetDirectoryName(Path.GetTempPath());
    Assert.That(Path.GetDirectoryName(args.ReportFile), Is.EqualTo(tempDirectory));
    Assert.That(Path.GetDirectoryName(args.LogFile), Is.EqualTo(tempDirectory));

    if (!string.IsNullOrEmpty(args.StandardOutFile))
    {
        Assert.That(Path.GetDirectoryName(args.StandardOutFile), Is.EqualTo(tempDirectory));
    }

    if (!string.IsNullOrEmpty(args.StandardErrorFile))
    {
        Assert.That(Path.GetDirectoryName(args.StandardErrorFile), Is.EqualTo(tempDirectory));
    }

    // Create empty result files just in case we are running via the source batching strategy
    Copy("BoostTestAdapterNunit.Resources.ReportsLogs.Empty.sample.test.log.xml", args.LogFile);
    Copy("BoostTestAdapterNunit.Resources.ReportsLogs.Empty.sample.test.report.xml", args.ReportFile);

    // Copy the default result files to a temporary location so that they can eventually be read as a TestResultCollection
    foreach (string test in args.Tests)
    {
        TestResources resources = Parent.TestResourceProvider(this.Source, test);
        Copy(resources.ReportFilePath, args.ReportFile);
        Copy(resources.LogFilePath, args.LogFile);
    }
}
/// <summary>
/// Verifies that, when a Boost.Test module supports '--version', every discovered
/// test case is annotated with the reported Boost version.
/// </summary>
public void VersionAnnotation()
{
    IBoostTestRunner runner = A.Fake<IBoostTestRunner>();

    // Fake a runner which supports both '--list_content' and '--version'
    A.CallTo(() => runner.Capabilities).Returns(new BoostTestRunnerCapabilities { ListContent = true, Version = true });

    // Emit the appropriate canned standard-error output for each invocation kind
    A.CallTo(() => runner.Execute(A<BoostTestRunnerCommandLineArgs>._, A<BoostTestRunnerSettings>._, A<IProcessExecutionContext>._)).Invokes((call) =>
    {
        BoostTestRunnerCommandLineArgs args = (BoostTestRunnerCommandLineArgs)call.Arguments.First();

        // --list_content=DOT
        if ((args.ListContent.HasValue) && (args.ListContent.Value == ListContentFormat.DOT) && (!string.IsNullOrEmpty(args.StandardErrorFile)))
        {
            TestHelper.CopyEmbeddedResourceToDirectory("BoostTestAdapterNunit.Resources.ListContentDOT.sample.3.list.content.gv", args.StandardErrorFile);
        }
        // --version
        else if ((args.Version) && (!string.IsNullOrEmpty(args.StandardErrorFile)))
        {
            TestHelper.CopyEmbeddedResourceToDirectory("BoostTestAdapterNunit.Resources.Version.sample.version.stderr.log", args.StandardErrorFile);
        }
    }).Returns(0);

    FakeBoostTestRunnerFactory factory = new FakeBoostTestRunnerFactory(runner);
    ListContentDiscoverer discoverer = new ListContentDiscoverer(factory, DummyVSProvider.Default);

    DefaultTestContext context = new DefaultTestContext();
    DefaultTestCaseDiscoverySink sink = new DefaultTestCaseDiscoverySink();

    discoverer.DiscoverTests(new[] { "test.exe", }, context, sink);

    // Ensure proper test discovery
    Assert.That(sink.Tests.Count, Is.Not.EqualTo(0));

    // Ensure that the version property matches the canned '--version' output ("1.63.0")
    foreach (var test in sink.Tests)
    {
        var version = test.GetPropertyValue(VSTestModel.VersionProperty);
        Assert.That(version, Is.EqualTo("1.63.0"));
    }
}
/// <summary>
/// Verifies '--version' serialization, with and without output redirection.
/// </summary>
public void VersionCommandLineArgs()
{
    var args = new BoostTestRunnerCommandLineArgs { Version = true };

    // Version without output redirection
    Assert.That(args.ToString(), Is.EqualTo("\"--version\""));

    // Version with output redirection
    args.StandardOutFile = @"C:\Temp\version.out";
    args.StandardErrorFile = @"C:\Temp\version.err";
    Assert.That(args.ToString(), Is.EqualTo("\"--version\" > \"C:\\Temp\\version.out\" 2> \"C:\\Temp\\version.err\""));
}
/// <summary>
/// Batches the provided tests into one test run per individual test case.
/// </summary>
/// <param name="tests">The Visual Studio test cases to batch</param>
/// <returns>A lazily-evaluated sequence of single-test runs</returns>
public override IEnumerable<TestRun> BatchTests(IEnumerable<VSTestCase> tests)
{
    // Group by source
    foreach (var source in tests.GroupBy(test => test.Source))
    {
        IBoostTestRunner runner = GetTestRunner(source.Key);
        if (runner == null)
        {
            continue;
        }

        // Emit one run per test case
        foreach (VSTestCase test in source)
        {
            BoostTestRunnerCommandLineArgs args = BuildCommandLineArgs(runner.Source);
            args.Tests.Add(test.FullyQualifiedName);

            yield return new TestRun(runner, new VSTestCase[] { test }, args, this.Settings.TestRunnerSettings);
        }
    }
}
/// <summary>
/// Factory function which returns an appropriate BoostTestRunnerCommandLineArgs structure
/// </summary>
/// <param name="source">The TestCases source</param>
/// <param name="settings">The Boost Test adapter settings currently in use</param>
/// <param name="debugMode">Determines whether the test should be debugged or not.</param>
/// <returns>A BoostTestRunnerCommandLineArgs structure for the provided source</returns>
private BoostTestRunnerCommandLineArgs GetDefaultArguments(string source, BoostTestAdapterSettings settings, bool debugMode)
{
    // Start from the user-configured command-line defaults
    BoostTestRunnerCommandLineArgs args = settings.CommandLineArgs.Clone();
    GetDebugConfigurationProperties(source, settings, args);

    // Specify log and report file information
    args.LogFormat = OutputFormat.XML;
    args.LogLevel = settings.LogLevel;
    args.LogFile = TestPathGenerator.Generate(source, FileExtensions.LogFile);

    args.ReportFormat = OutputFormat.XML;
    args.ReportLevel = ReportLevel.Detailed;
    args.ReportFile = TestPathGenerator.Generate(source, FileExtensions.ReportFile);

    // Standard stream redirection is opt-in via settings
    args.StandardOutFile = ((settings.EnableStdOutRedirection) ? TestPathGenerator.Generate(source, FileExtensions.StdOutFile) : null);
    args.StandardErrorFile = ((settings.EnableStdErrRedirection) ? TestPathGenerator.Generate(source, FileExtensions.StdErrFile) : null);

    // Set '--catch_system_errors' to 'yes' if the test is not being debugged
    // or if this value was not overridden via configuration before-hand
    //
    // NOTE(review): as written, an explicitly-configured 'false' is still promoted
    //               to 'true' whenever debugMode is false ('false || !debugMode'),
    //               which appears to contradict the comment above — confirm whether
    //               'args.CatchSystemErrors ?? !debugMode' was the intent
    args.CatchSystemErrors = args.CatchSystemErrors.GetValueOrDefault(false) || !debugMode;

    return (args);
}
/// <summary>
/// Batches the provided tests into one test run per source (i.e. per test module).
/// </summary>
/// <param name="tests">The Visual Studio test cases to batch</param>
/// <returns>A lazily-evaluated sequence of per-module test runs</returns>
public override IEnumerable<TestRun> BatchTests(IEnumerable<VSTestCase> tests)
{
    BoostTestRunnerSettings adaptedSettings = this.Settings.TestRunnerSettings.Clone();
    // A whole module executes per run, so a per-test timeout is not applicable
    adaptedSettings.Timeout = -1;

    // Group by source
    foreach (var source in tests.GroupBy(test => test.Source))
    {
        IBoostTestRunner runner = GetTestRunner(source.Key);
        if (runner == null)
        {
            continue;
        }

        // NOTE the --run_test command-line arg is left empty so that all tests are executed
        yield return new TestRun(runner, source, BuildCommandLineArgs(source.Key), adaptedSettings);
    }
}