protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    using (progressMonitor.BeginTask("Verifying Specifications", rootTestCommand.TestCount))
    {
        // Consistency fix: sibling controllers in this codebase honor a pending
        // cancellation before doing any work; this controller previously did not.
        if (progressMonitor.IsCanceled)
            return new TestResult(TestOutcome.Canceled);

        if (options.SkipTestExecution)
            return SkipAll(rootTestCommand, parentTestStep);

        // Root step under which each assembly's results are aggregated.
        ITestContext rootContext = rootTestCommand.StartPrimaryChildStep(parentTestStep);
        TestStep rootStep = rootContext.TestStep;
        TestOutcome outcome = TestOutcome.Passed;

        // Only NSpec assembly-level tests are runnable here; other children are ignored.
        foreach (ITestCommand command in rootTestCommand.Children)
        {
            NSpecAssemblyTest assemblyTest = command.Test as NSpecAssemblyTest;
            if (assemblyTest == null)
                continue;

            var assemblyResult = this.RunAssembly(command, rootStep);
            outcome = outcome.CombineWith(assemblyResult.Outcome);
        }

        return rootContext.FinishStep(outcome, null);
    }
}
/// <inheritdoc />
protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    ThrowIfDisposed();

    using (progressMonitor.BeginTask(Resources.MbUnit2TestController_RunningMbUnitTests, 1))
    {
        // Bail out immediately if the user already canceled the run.
        if (progressMonitor.IsCanceled)
            return new TestResult(TestOutcome.Canceled);

        if (options.SkipTestExecution)
            return SkipAll(rootTestCommand, parentTestStep);

        IList<ITestCommand> commands = rootTestCommand.GetAllCommands();
        using (var fixtureRunner = new InstrumentedFixtureRunner(fixtureExplorer, commands, progressMonitor, parentTestStep))
            return fixtureRunner.Run();
    }
}
protected internal override TestResult RunImpl(ITestCommand rootTestCommand, Model.Tree.TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    using (progressMonitor.BeginTask("Running tests.", rootTestCommand.TestCount))
    {
        // options.SkipTestExecution is deliberately NOT checked here: the tree of
        // data-driven test steps must still be built, so the flag is honored later
        // inside PatternTestExecutor. This differs from framework adapters, which
        // do not generally support dynamically generated data-driven tests.
        var sandbox = new Sandbox();
        EventHandler onCanceled = (sender, e) => sandbox.Abort(TestOutcome.Canceled, "The user canceled the test run.");
        try
        {
            progressMonitor.Canceled += onCanceled;
            TestAssemblyExecutionParameters.Reset();

            var executor = new PatternTestExecutor(options, progressMonitor, formatter, converter, environmentManager);

            // Run the root action directly (inlined) to keep the stack shallow.
            var rootAction = executor.CreateActionToRunTest(rootTestCommand, parentTestStep, sandbox, null);
            rootAction.Run();
            return rootAction.Result;
        }
        finally
        {
            progressMonitor.Canceled -= onCanceled;
            sandbox.Dispose();
        }
    }
}
public PatternTestExecutor(TestExecutionOptions options, IProgressMonitor progressMonitor, IFormatter formatter, IConverter converter, ITestEnvironmentManager environmentManager)
{
    this.options = options;
    this.progressMonitor = progressMonitor;
    this.formatter = formatter;
    this.converter = converter;
    this.environmentManager = environmentManager;

    // The degree of parallelism is supplied as a callback so the scheduler can
    // pick it up from TestAssemblyExecutionParameters whenever it evaluates it.
    scheduler = new WorkScheduler(() =>
    {
        if (options.SingleThreaded)
            return 1;
        return TestAssemblyExecutionParameters.DegreeOfParallelism;
    });
}
public void SetUp()
{
    // Fresh stubs and default option objects for each test.
    logger = MockRepository.GenerateStub<ILogger>();
    driver = new IronRubyTestDriver(logger);

    var isolationProvider = (ITestIsolationProvider)
        RuntimeAccessor.ServiceLocator.ResolveByComponentId("Gallio.LocalTestIsolationProvider");
    testIsolationContext = isolationProvider.CreateContext(new TestIsolationOptions(), logger);

    testPackage = new TestPackage();
    testExplorationOptions = new TestExplorationOptions();
    testExecutionOptions = new TestExecutionOptions();
    messageSink = MockRepository.GenerateStub<IMessageSink>();
    progressMonitor = NullProgressMonitor.CreateInstance();
}
protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    using (progressMonitor.BeginTask(Resources.XunitTestController_RunningXunitTests, rootTestCommand.TestCount))
    {
        // Either mark everything skipped or hand the whole command tree to the runner.
        return options.SkipTestExecution
            ? SkipAll(rootTestCommand, parentTestStep)
            : RunTest(rootTestCommand, parentTestStep, progressMonitor);
    }
}
/// <summary>
/// Creates event arguments describing the start of a test run.
/// </summary>
/// <param name="testPackage">The test package.</param>
/// <param name="testExplorationOptions">The test exploration options.</param>
/// <param name="testExecutionOptions">The test execution options.</param>
/// <param name="reportLockBox">The report lock-box which may be used to access the report asynchronously during execution.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="testPackage"/>,
/// <paramref name="testExplorationOptions"/> or <paramref name="testExecutionOptions"/> is null.</exception>
public RunStartedEventArgs(TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, LockBox<Report> reportLockBox)
{
    if (testPackage == null)
    {
        throw new ArgumentNullException("testPackage");
    }
    if (testExplorationOptions == null)
    {
        throw new ArgumentNullException("testExplorationOptions");
    }
    if (testExecutionOptions == null)
    {
        throw new ArgumentNullException("testExecutionOptions");
    }

    this.testPackage = testPackage;
    this.testExplorationOptions = testExplorationOptions;
    this.testExecutionOptions = testExecutionOptions;
    this.reportLockBox = reportLockBox;
}
protected override void RunTestsInternal(ITestCommand rootTestCommand, ITestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    // The monitor itself is disposed when the run completes.
    using (progressMonitor)
    {
        progressMonitor.BeginTask("Verifying Specifications", rootTestCommand.TestCount);

        // Guard clause: skip-mode short-circuits the actual run.
        if (options.SkipTestExecution)
        {
            SkipAll(rootTestCommand, parentTestStep);
            return;
        }

        RunTest(rootTestCommand, parentTestStep, progressMonitor);
    }
}
/// <inheritdoc />
protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    using (progressMonitor.BeginTask(Resources.ConcordionTestController_RunningConcordionTests, rootTestCommand.TestCount))
    {
        // Honor a pending cancellation before doing any work.
        if (progressMonitor.IsCanceled)
            return new TestResult(TestOutcome.Canceled);

        return options.SkipTestExecution
            ? SkipAll(rootTestCommand, parentTestStep)
            : RunTest(rootTestCommand, parentTestStep, progressMonitor);
    }
}
void SetupRunOptions(TestExecutionOptions options)
{
    // Gallio's filter mechanism is extremely flexible; for now only equality
    // filters on the "Tag" metadata key are honored, bucketed by rule type.
    var metaFilters = options.FilterSet.Rules
        .Select(filter => new { filter.RuleType, Rule = filter.Filter as Gallio.Model.Filters.MetadataFilter<Gallio.Model.Filters.ITestDescriptor> })
        .Where(x => x.Rule != null);

    var tagFilters = metaFilters
        .Select(meta => new { meta.RuleType, Value = meta.Rule.ValueFilter as Gallio.Model.Filters.EqualityFilter<string>, meta.Rule.Key })
        .Where(x => x.Value != null && x.Key == "Tag")
        .GroupBy(x => x.RuleType, x => x.Value.Comparand);

    // Missing groups fall back to empty tag sets.
    var includeTags = tagFilters.SingleOrDefault(g => g.Key == Gallio.Model.Filters.FilterRuleType.Inclusion)
        ?? Enumerable.Empty<string>();
    var excludeTags = tagFilters.SingleOrDefault(g => g.Key == Gallio.Model.Filters.FilterRuleType.Exclusion)
        ?? Enumerable.Empty<string>();

    _options = new RunOptions(includeTags, excludeTags, new string[0]);
}
/// <inheritdoc />
protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    // Make suite-fixture data available before anything executes.
    IList<Test> allTests = parentTestStep.Test.Children;
    PopulateSuiteFixtureData(allTests, options);

    using (progressMonitor.BeginTask(Resources.ConcordionTestController_RunningConcordionTests, rootTestCommand.TestCount))
    {
        if (progressMonitor.IsCanceled)
            return new TestResult(TestOutcome.Canceled);

        return options.SkipTestExecution
            ? SkipAll(rootTestCommand, parentTestStep)
            : RunTest(rootTestCommand, parentTestStep, progressMonitor);
    }
}
/// <inheritdoc />
protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    IList<ITestCommand> commands = rootTestCommand.GetAllCommands();

    using (progressMonitor.BeginTask(Resources.CSUnitTestController_RunningCSUnitTests, commands.Count))
    {
        if (progressMonitor.IsCanceled)
            return new TestResult(TestOutcome.Canceled);

        if (options.SkipTestExecution)
            return SkipAll(rootTestCommand, parentTestStep);

        using (var runnerMonitor = new RunnerMonitor(commands, parentTestStep, progressMonitor))
            return runnerMonitor.Run(assemblyLocation);
    }
}
protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    // NOTE(review): sibling controllers put the BeginTask result inside the
    // using block rather than the monitor itself; presumably disposing the
    // monitor also ends the task — confirm against the Gallio progress-monitor API.
    using (progressMonitor)
    {
        progressMonitor.BeginTask("Verifying Specifications", rootTestCommand.TestCount);
        if (options.SkipTestExecution)
        {
            return SkipAll(rootTestCommand, parentTestStep);
        }
        else
        {
            // Root step under which each assembly's results are aggregated.
            ITestContext rootContext = rootTestCommand.StartPrimaryChildStep(parentTestStep);
            TestStep rootStep = rootContext.TestStep;
            TestOutcome outcome = TestOutcome.Passed;

            // Stash the monitor and wire up run options/listeners before
            // notifying the listener that the run has started.
            _progressMonitor = progressMonitor;
            SetupRunOptions(options);
            SetupListeners(options);
            _listener.OnRunStart();

            // Only Machine.Specifications assembly-level tests are runnable here;
            // any other child commands are skipped.
            foreach (ITestCommand command in rootTestCommand.Children)
            {
                MachineAssemblyTest assemblyTest = command.Test as MachineAssemblyTest;
                if (assemblyTest == null)
                    continue;

                var assemblyResult = RunAssembly(assemblyTest, command, rootStep);
                // Overall outcome is the combination of all assembly outcomes.
                outcome = outcome.CombineWith(assemblyResult.Outcome);
            }

            _listener.OnRunEnd();

            return rootContext.FinishStep(outcome, null);
        }
    }
}
/// <inheritdoc />
protected override TestResult RunImpl(ITestCommand rootTestCommand, TestStep parentTestStep, TestExecutionOptions options, IProgressMonitor progressMonitor)
{
    ThrowIfDisposed();

    IList<ITestCommand> commands = rootTestCommand.GetAllCommands();

    using (progressMonitor.BeginTask(Resources.NUnitTestController_RunningNUnitTests, commands.Count))
    {
        if (progressMonitor.IsCanceled)
            return new TestResult(TestOutcome.Canceled);

        if (options.SkipTestExecution)
            return SkipAll(rootTestCommand, parentTestStep);

        using (var monitor = new RunMonitor(runner, commands, parentTestStep, progressMonitor))
            return monitor.Run();
    }
}
public IEnumerable<AutoTest.TestRunners.Shared.Results.TestResult> Run(RunSettings settings)
{
    // Guard: the runner must have been initialized before a run is attempted.
    if (!_isInitialized)
        return new AutoTest.TestRunners.Shared.Results.TestResult[] { getNotInitializedResult(settings) };

    var tests = settings.Assembly.Tests.ToList();
    var members = settings.Assembly.Members.ToList();
    var namespaces = settings.Assembly.Namespaces.ToList();
    // NOTE(review): runAll is computed but never read in this method — either
    // dead code or intended for a branch that was removed; worth confirming.
    var runAll = namespaces.Count == 0 && members.Count == 0 && tests.Count == 0;

    // 'steps' collects step-started metadata so finished steps can be matched
    // back to their test case; 'testResults' is the value we ultimately return.
    var steps = new List<TestStepData>();
    var testResults = new List<AutoTest.TestRunners.Shared.Results.TestResult>();

    // Get a test isolation context. Here we want to run tests in the same AppDomain.
    var testIsolationProvider = (ITestIsolationProvider)RuntimeAccessor.ServiceLocator.ResolveByComponentId("Gallio.LocalTestIsolationProvider");
    var testIsolationOptions = new TestIsolationOptions();
    ITestIsolationContext testIsolationContext = testIsolationProvider.CreateContext(testIsolationOptions, _logger);

    var testPackage = new TestPackage();
    testPackage.AddFile(new FileInfo(settings.Assembly.Assembly));
    testPackage.TestFrameworkFallbackMode = TestFrameworkFallbackMode.Strict;

    // Create some test exploration options. Nothing interesting for you here, probably.
    var testExplorationOptions = new TestExplorationOptions();

    // Translate Gallio step messages into AutoTest results as they arrive.
    var messageSink = new MessageConsumer()
        .Handle<TestStepStartedMessage>((message) =>
        {
            steps.Add(message.Step);
        })
        .Handle<TestStepFinishedMessage>(message =>
        {
            // Only report steps that correspond to actual test cases.
            var test = steps.FirstOrDefault(x => x.Id.Equals(message.StepId) && x.IsTestCase);
            if (test == null)
                return;
            // Fixture name is "<namespace>.<parent step name>".
            var fixture = string.Format("{0}.{1}", test.CodeReference.NamespaceName, steps.First(x => x.Id.Equals(test.ParentId)).Name);
            testResults.Add(new AutoTest.TestRunners.Shared.Results.TestResult(
                "MbUnit",
                settings.Assembly.Assembly,
                fixture,
                message.Result.Duration.TotalMilliseconds,
                string.Format("{0}.{1}", fixture, test.Name),
                convertState(message.Result.Outcome.Status),
                message.Result.Outcome.DisplayName));
        });

    // Provide a progress monitor.
    var logProgressMonitorProvider = new LogProgressMonitorProvider(_logger);
    var options = new TestExecutionOptions();
    // Restrict execution to the requested namespaces/members/tests.
    options.FilterSet = new Gallio.Model.Filters.FilterSet<ITestDescriptor>(new OrFilter<ITestDescriptor>(getTestFilter(namespaces, members, tests)));

    // Run the tests.
    logProgressMonitorProvider.Run((progressMonitor) =>
    {
        _testDriver.Run(testIsolationContext, testPackage, testExplorationOptions, options, messageSink, progressMonitor);
    });

    return testResults;
}
private void RunAllTests(IRunContext runContext)
{
    // Build an isolated-AppDomain Gallio runner wired back into the run context.
    var runnerManager = RuntimeAccessor.ServiceLocator.Resolve<ITestRunnerManager>();
    var runner = runnerManager.CreateTestRunner(StandardTestRunnerFactoryNames.IsolatedAppDomain);
    runner.RegisterExtension(new RunContextExtension(runContext));

    ILogger logger = new RunContextLogger(runContext);
    var runnerOptions = new TestRunnerOptions();

    try
    {
        RunWithProgressMonitor(delegate(IProgressMonitor progressMonitor)
        {
            runner.Initialize(runnerOptions, logger, progressMonitor);
        });
        if (isCanceled)
            return;

        // Collect the assemblies containing the selected Gallio tests.
        var testPackage = new TestPackage();
        testPackage.AddExcludedTestFrameworkId("MSTestAdapter.TestFramework");
        foreach (ITestElement testElement in runContext.RunConfig.TestElements)
        {
            var gallioElement = testElement as GallioTestElement;
            if (gallioElement != null)
                testPackage.AddFile(new FileInfo(gallioElement.AssemblyPath));
        }

        var explorationOptions = new TestExplorationOptions();
        var executionOptions = new TestExecutionOptions();

        // Restrict execution to exactly the tests selected in the run config.
        var idFilters = new List<Filter<string>>();
        foreach (ITestElement includedElement in runContext.RunConfig.TestElements)
        {
            var gallioElement = includedElement as GallioTestElement;
            if (gallioElement != null)
                idFilters.Add(new EqualityFilter<string>(gallioElement.GallioTestId));
        }
        executionOptions.FilterSet = new FilterSet<ITestDescriptor>(new IdFilter<ITestDescriptor>(new OrFilter<string>(idFilters)));

        RunWithProgressMonitor(delegate(IProgressMonitor progressMonitor)
        {
            runner.Run(testPackage, explorationOptions, executionOptions, progressMonitor);
        });
    }
    finally
    {
        runner.Dispose(NullProgressMonitor.CreateInstance());
    }
}
private void ExploreOrRun(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor, string taskName)
{
    // One work unit per file; at least one so the task has a positive total.
    double totalWork = Math.Max(testPackage.Files.Count, 1);

    using (progressMonitor.BeginTask(taskName, totalWork))
    {
        foreach (FileInfo file in testPackage.Files)
        {
            if (progressMonitor.IsCanceled)
                return;

            var remoteMessageSink = new RemoteMessageSink(messageSink);
            ExploreOrRunAssembly(testIsolationContext, testPackage, testExplorationOptions, testExecutionOptions,
                remoteMessageSink, progressMonitor.CreateSubProgressMonitor(1), taskName, file);
        }
    }
}
/// <inheritdoc />
protected override void RunImpl(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Half the progress budget covers publishing the model, half the run itself.
    double workItems = Math.Max(testPackage.Files.Count, 1);
    using (progressMonitor.BeginTask("Running unsupported tests.", workItems * 2))
    {
        TestModel testModel = PublishTestModelFromFiles(testPackage.Files, messageSink, progressMonitor);
        if (testModel == null)
            return;

        RunTestModel(testModel, messageSink, progressMonitor, workItems);
    }
}
private void ExploreOrRun(Type driverType, object[] driverArguments, string assemblyPath, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    Assembly assembly = LoadAssembly(assemblyPath);

    using (var sink = new QueuedMessageSink(messageSink))
    {
        // A null execution-options argument signals "explore only".
        var driver = (DotNetTestDriver)Activator.CreateInstance(driverType, driverArguments);
        if (testExecutionOptions != null)
            driver.RunAssembly(assembly, testExplorationOptions, testExecutionOptions, sink, progressMonitor);
        else
            driver.ExploreAssembly(assembly, testExplorationOptions, sink, progressMonitor);
    }
}
/// <summary>
/// Creates a copy of the options.
/// </summary>
/// <returns>The copy.</returns>
public TestExecutionOptions Copy()
{
    // Shallow-copy each field; the properties bag is copied element-wise.
    var copy = new TestExecutionOptions
    {
        filterSet = filterSet,
        exactFilter = exactFilter,
        skipDynamicTests = skipDynamicTests,
        skipTestExecution = skipTestExecution,
        singleThreaded = singleThreaded
    };
    copy.properties.AddAll(properties);
    return copy;
}
protected override void RunImpl(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Dispatches the run across every applicable test framework driver,
    // splitting the progress budget evenly among the drivers.
    using (progressMonitor.BeginTask("Running tests.", 1))
    {
        // If the isolation context can only execute on one thread, force
        // single-threaded execution on a copy so the caller's options stay untouched.
        if (testIsolationContext.RequiresSingleThreadedExecution && !testExecutionOptions.SingleThreaded)
        {
            testExecutionOptions = testExecutionOptions.Copy();
            testExecutionOptions.SingleThreaded = true;
        }

        ForEachDriver(testFrameworkManager.SelectTestFrameworksForFilesImpl(
            testFrameworkHandles, testFrameworkFallbackMode, testFrameworkOptions,
            testPackage.Files), (driver, items, driverCount) =>
        {
            // Each driver only sees the files that were selected for it.
            TestPackage testPackageForDriver = CreateTestPackageWithFiles(testPackage, items);
            driver.Run(testIsolationContext, testPackageForDriver, testExplorationOptions,
                testExecutionOptions, messageSink,
                progressMonitor.CreateSubProgressMonitor(1.0 / driverCount));
            // NOTE(review): presumably false means "continue with the remaining
            // drivers" — confirm against ForEachDriver's callback contract.
            return (false);
        });
    }
}
/// <summary>
/// Called automatically when the isolation task is ready to run.
/// Loads the native Boost.Test bridge DLL and drives the run through it.
/// </summary>
protected override object RunImpl(object[] args)
{
    // Unpack the positional arguments marshalled across the isolation boundary.
    file_ = (FileInfo)args[0];
    architecture_ = (string)args[1];
    configuration_ = (string)args[2];
    testExecutionOptions_ = (TestExecutionOptions)args[3];
    logger_ = (ILogger)args[4];
    progressMonitor_ = (IProgressMonitor)args[5];

    using (QueuedMessageSink sink = new QueuedMessageSink((IMessageSink)args[6]))
    {
        messageSink_ = sink;
        testModel_ = new TestModel();
        currentTest_ = testModel_.RootTest;
        currentTestSuite_ = testModel_.RootTest;

        using (progressMonitor_.BeginTask("Processing " + file_.Name, 100))
        {
            // Expect native DLL to be reachable in subdirectory relative to the current assembly path.
            string assemblyDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
            string suffix = Path.Combine(architecture_, configuration_);

            // Make sure we can find the right version of Boost.Test DLL.
            if (!SetDllDirectory(Path.Combine(assemblyDir, suffix)))
            {
                Logger.Log(LogSeverity.Error, String.Format("Failed to adjust DLL directory search path: {0}", new Win32Exception(Marshal.GetLastWin32Error()).Message));
                return null;
            }

            // Try loading native bridge DLL.
            IntPtr hModule = LoadLibrary(Path.Combine(assemblyDir, Path.Combine(suffix, NativeDllName)));
            if (hModule == IntPtr.Zero)
            {
                Logger.Log(LogSeverity.Error, String.Format("Failed to load native DLL to communicate with Boost.Test: {0}", new Win32Exception(Marshal.GetLastWin32Error()).Message));
                return null;
            }

            try
            {
                // Make sure we allow loading additional DLLs
                // from the same folder our testable DLL is located in.
                // NOTE(review): 'File' here is presumably a property exposing the
                // 'file_' FileInfo (System.IO.File has no DirectoryName member) —
                // confirm against the rest of this class.
                if (!SetDllDirectory(File.DirectoryName))
                {
                    Logger.Log(LogSeverity.Error, String.Format("Failed to adjust DLL directory search path: {0}", new Win32Exception(Marshal.GetLastWin32Error()).Message));
                    return null;
                }

                progressMonitor_.Worked(14);

                // Retrieve pointer to function in native bridge DLL that is required to
                // perform our task.
                IntPtr bridgeFunc = GetProcAddress(hModule, BridgeFunctionName);
                if (bridgeFunc == IntPtr.Zero)
                {
                    Logger.Log(LogSeverity.Error, String.Format("Failed to retrieve entry point {0} in Boost.Test interface: {1}", BridgeFunctionName, new Win32Exception(Marshal.GetLastWin32Error()).Message));
                    return null;
                }

                progressMonitor_.Worked(1);

                // Perform the task; the bulk of the progress budget (80 units)
                // is delegated to the native execution.
                Execute(bridgeFunc, progressMonitor_.CreateSubProgressMonitor(80));
            }
            finally
            {
                // Always release the native module and record the final work units.
                FreeLibrary(hModule);
                progressMonitor_.Worked(5);
            }
        }
    }
    return null;
}
private void ExploreOrRun(Type driverType, object[] driverArguments, string assemblyPath, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    Assembly loadedAssembly = LoadAssembly(assemblyPath);

    using (var queuedSink = new QueuedMessageSink(messageSink))
    {
        var driver = (DotNetTestDriver)Activator.CreateInstance(driverType, driverArguments);
        if (testExecutionOptions == null)
        {
            // No execution options supplied: exploration only.
            driver.ExploreAssembly(loadedAssembly, testExplorationOptions, queuedSink, progressMonitor);
        }
        else
        {
            driver.RunAssembly(loadedAssembly, testExplorationOptions, testExecutionOptions, queuedSink, progressMonitor);
        }
    }
}
private void ExploreOrRunAssembly(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, RemoteMessageSink remoteMessageSink, IProgressMonitor progressMonitor, string taskName, FileInfo file)
{
    // Explores or runs one test assembly in an isolated host.
    // Progress budget: 2 units metadata, 97 isolated task, 1 final cleanup.
    using (progressMonitor.BeginTask(taskName, 100))
    {
        if (progressMonitor.IsCanceled)
            return;

        string assemblyPath = file.FullName;
        progressMonitor.SetStatus("Getting test assembly metadata.");
        // Only the runtime version field is requested; it is needed to set up the host.
        AssemblyMetadata assemblyMetadata = AssemblyUtils.GetAssemblyMetadata(assemblyPath, AssemblyMetadataFields.RuntimeVersion);
        progressMonitor.Worked(2);

        if (progressMonitor.IsCanceled)
            return;

        // A null metadata result — presumably a file that is not a readable
        // .NET assembly — is silently skipped.
        if (assemblyMetadata != null)
        {
            Type driverType = GetType();
            object[] driverArguments = GetRemoteTestDriverArguments();

            HostSetup hostSetup = CreateHostSetup(testPackage, assemblyPath, assemblyMetadata);

            using (var remoteProgressMonitor = new RemoteProgressMonitor(progressMonitor.CreateSubProgressMonitor(97)))
            {
                testIsolationContext.RunIsolatedTask<ExploreOrRunTask>(hostSetup,
                    (statusMessage) => progressMonitor.SetStatus(statusMessage),
                    new object[] { driverType, driverArguments, assemblyPath, testExplorationOptions, testExecutionOptions, remoteMessageSink, remoteProgressMonitor });
            }

            // Record one final work unit after the isolated task has been fully cleaned up.
            progressMonitor.SetStatus("");
            progressMonitor.Worked(1);
        }
    }
}
/// <summary>
/// Runs tests in an assembly.
/// </summary>
/// <remarks>
/// <para>
/// The default implementation does nothing. Subclasses may override to enable tests
/// to be run. This is required for actually running tests.
/// </para>
/// <para>
/// This method executes within the test host which is most likely a different AppDomain
/// or Process from the test runner itself.
/// </para>
/// </remarks>
/// <param name="assembly">The test assembly, not null.</param>
/// <param name="testExplorationOptions">The test exploration options, not null.</param>
/// <param name="testExecutionOptions">The test execution options, not null.</param>
/// <param name="messageSink">The message sink, not null.</param>
/// <param name="progressMonitor">The progress monitor, not null.</param>
protected virtual void RunAssembly(Assembly assembly, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Intentionally empty: the base driver cannot run tests; subclasses override.
}
/// <inheritdoc />
public void Run(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Validate every argument up front; RunImpl assumes non-null inputs.
    if (testIsolationContext == null)
    {
        throw new ArgumentNullException("testIsolationContext");
    }
    if (testPackage == null)
    {
        throw new ArgumentNullException("testPackage");
    }
    if (testExplorationOptions == null)
    {
        throw new ArgumentNullException("testExplorationOptions");
    }
    if (testExecutionOptions == null)
    {
        throw new ArgumentNullException("testExecutionOptions");
    }
    if (messageSink == null)
    {
        throw new ArgumentNullException("messageSink");
    }
    if (progressMonitor == null)
    {
        throw new ArgumentNullException("progressMonitor");
    }

    RunImpl(testIsolationContext, testPackage, testExplorationOptions, testExecutionOptions, messageSink, progressMonitor);
}
void SetupListeners(TestExecutionOptions options)
{
    // No additional per-run listeners are wired up yet, so the aggregate starts empty.
    var noListeners = Enumerable.Empty<ISpecificationRunListener>();
    _listener = new AggregateRunListener(noListeners);
}
/// <inheritdoc />
protected sealed override void RunImpl(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Delegate to the shared explore/run pipeline in "run" mode.
    ExploreOrRun(testIsolationContext, testPackage, testExplorationOptions, testExecutionOptions,
        messageSink, progressMonitor, "Running tests.");
}
private static void ExploreOrRun(TestPackage testPackage, ScriptRuntimeSetup scriptRuntimeSetup, string testDriverScriptPath, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor, ILogger logger)
{
    // Hosts a DLR script runtime with console I/O redirected into the Gallio log,
    // then hands control to the test-driver script.
    using (BufferedLogWriter outputWriter = new BufferedLogWriter(logger, LogSeverity.Info, Encoding.Default),
        errorWriter = new BufferedLogWriter(logger, LogSeverity.Error, Encoding.Default))
    {
        using (var queuedMessageSink = new QueuedMessageSink(messageSink))
        {
            // Redirect Console.Out/Error for the duration of the script run.
            using (new ConsoleRedirection(outputWriter, errorWriter))
            {
                var scriptRuntime = new ScriptRuntime(scriptRuntimeSetup);
                scriptRuntime.IO.SetInput(Stream.Null, TextReader.Null, Encoding.Default);
                scriptRuntime.IO.SetOutput(new TextWriterStream(outputWriter), outputWriter);
                scriptRuntime.IO.SetErrorOutput(new TextWriterStream(errorWriter), errorWriter);

                try
                {
                    // Parameters are handed to the script through a well-known global.
                    var scriptParameters = new Dictionary<string, object>();
                    // A null TestExecutionOptions means "explore only".
                    scriptParameters.Add("Verb", testExecutionOptions != null ? "Run" : "Explore");
                    scriptParameters.Add("TestPackage", testPackage);
                    scriptParameters.Add("TestExplorationOptions", testExplorationOptions);
                    scriptParameters.Add("TestExecutionOptions", testExecutionOptions);
                    scriptParameters.Add("MessageSink", queuedMessageSink);
                    scriptParameters.Add("ProgressMonitor", progressMonitor);
                    scriptParameters.Add("Logger", logger);
                    scriptRuntime.Globals.SetVariable(ScriptParametersVariableName, scriptParameters);

                    RunScript(scriptRuntime, testDriverScriptPath);
                }
                finally
                {
                    // Shut the runtime down even if the script fails.
                    scriptRuntime.Shutdown();
                }
            }
        }
    }
}
/// <summary>
/// Runs tests from a test package.
/// </summary>
/// <remarks>
/// <para>
/// The default implementation does nothing. Subclasses may override to enable tests
/// to be run. This is required for actually running tests.
/// </para>
/// </remarks>
/// <param name="testIsolationContext">The test isolation context, not null.</param>
/// <param name="testPackage">The test package, not null.</param>
/// <param name="testExplorationOptions">The test exploration options, not null.</param>
/// <param name="testExecutionOptions">The test execution options, not null.</param>
/// <param name="messageSink">The message sink to receive test exploration and execution messages, not null.</param>
/// <param name="progressMonitor">The progress monitor, not null.</param>
protected virtual void RunImpl(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Intentionally empty: the base driver cannot run tests; subclasses override.
    // (The previous doc comment claimed a "test report" return value, but the
    // method is void — that stale <returns> tag has been removed.)
}
/// <inheritdoc />
protected override sealed void RunImpl(ITestIsolationContext testIsolationContext, TestPackage testPackage,
    TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions,
    IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Forward to the combined explore/run helper; supplying execution options
    // selects "run" mode.
    ExploreOrRun(testIsolationContext, testPackage, testExplorationOptions,
        testExecutionOptions, messageSink, progressMonitor, "Running tests.");
}
/// <inheritdoc />
public Report Run(TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IProgressMonitor progressMonitor)
{
    // Bug fix: the null-check previously reported a wrong parameter name
    // ("testPackageConfig"); the parameter is actually named "testPackage".
    if (testPackage == null)
        throw new ArgumentNullException("testPackage");
    if (testExplorationOptions == null)
        throw new ArgumentNullException("testExplorationOptions");
    if (testExecutionOptions == null)
        throw new ArgumentNullException("testExecutionOptions");
    if (progressMonitor == null)
        throw new ArgumentNullException("progressMonitor");
    ThrowIfDisposed();
    if (state != State.Initialized)
        throw new InvalidOperationException("The test runner must be initialized before this operation is performed.");

    // Work on defensive copies so the caller's objects are never mutated.
    testPackage = testPackage.Copy();
    testExplorationOptions = testExplorationOptions.Copy();
    testExecutionOptions = testExecutionOptions.Copy();
    GenericCollectionUtils.ForEach(testRunnerOptions.Properties, x => testPackage.AddProperty(x.Key, x.Value));

    using (progressMonitor.BeginTask("Running the tests.", 10))
    {
        Stopwatch stopwatch = Stopwatch.StartNew();
        Report report = new Report()
        {
            TestPackage = new TestPackageData(testPackage),
            TestModel = new TestModelData(),
            TestPackageRun = new TestPackageRun() { StartTime = DateTime.Now }
        };
        var reportLockBox = new LockBox<Report>(report);

        eventDispatcher.NotifyRunStarted(new RunStartedEventArgs(testPackage, testExplorationOptions, testExecutionOptions, reportLockBox));

        bool success;
        using (Listener listener = new Listener(eventDispatcher, tappedLogger, reportLockBox))
        {
            try
            {
                ITestDriver testDriver = testFrameworkManager.GetTestDriver(
                    testPackage.CreateTestFrameworkSelector(), tappedLogger);

                using (testIsolationContext.BeginBatch(progressMonitor.SetStatus))
                {
                    testDriver.Run(testIsolationContext, testPackage, testExplorationOptions,
                        testExecutionOptions, listener, progressMonitor.CreateSubProgressMonitor(10));
                }

                success = true;
            }
            catch (Exception ex)
            {
                // A driver failure is recorded in the log and the report but does
                // not propagate: the run still finishes and returns its report.
                success = false;
                tappedLogger.Log(LogSeverity.Error,
                    "A fatal exception occurred while running tests. Possible causes include invalid test runner parameters and stack overflows.",
                    ex);
                report.TestModel.Annotations.Add(new AnnotationData(AnnotationType.Error,
                    CodeLocation.Unknown, CodeReference.Unknown,
                    "A fatal exception occurred while running tests. See log for details.", null));
            }
            finally
            {
                // Always stamp the end time and duration, even on failure.
                report.TestPackageRun.EndTime = DateTime.Now;
                report.TestPackageRun.Statistics.Duration = stopwatch.Elapsed.TotalSeconds;
            }
        }

        eventDispatcher.NotifyRunFinished(new RunFinishedEventArgs(success, report));
        return report;
    }
}
private void ExploreOrRun(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor, string taskName)
{
    // Runs (or explores) the scripted test driver inside an isolated host.
    using (progressMonitor.BeginTask(taskName, 1))
    {
        if (progressMonitor.IsCanceled)
            return;

        // Nothing to do when the package yields no driver script.
        FileInfo testDriverScriptFile = GetTestDriverScriptFile(testPackage);
        if (testDriverScriptFile == null)
            return;

        HostSetup hostSetup = CreateHostSetup(testPackage);
        ScriptRuntimeSetup scriptRuntimeSetup = CreateScriptRuntimeSetup(testPackage);
        string testDriverScriptPath = testDriverScriptFile.FullName;
        // Remote proxies let the isolated AppDomain report back to this side.
        var remoteMessageSink = new RemoteMessageSink(messageSink);
        var remoteLogger = new RemoteLogger(logger);

        using (var remoteProgressMonitor = new RemoteProgressMonitor(progressMonitor.CreateSubProgressMonitor(1)))
        {
            testIsolationContext.RunIsolatedTask<ExploreOrRunTask>(hostSetup,
                (statusMessage) => progressMonitor.SetStatus(statusMessage),
                new object[] { testPackage, scriptRuntimeSetup, testDriverScriptPath, testExplorationOptions, testExecutionOptions, remoteMessageSink, remoteProgressMonitor, remoteLogger });
        }
    }
}