Example #1
        // NOTE: an earlier version of this code had a FilterBuilder
        // property. This seemed to make sense, because we instantiate
        // it in two different places. However, the existence of an
        // NUnitTestFilterBuilder, containing a reference to an engine
        // service, caused our second-level tests of the test executor
        // to throw an exception. So if you consider doing this, beware!

        #endregion

        #region ITestExecutor Implementation

        /// <summary>
        /// Called by the Visual Studio IDE to run all tests. Also called by TFS Build
        /// to run either all or selected tests. In the latter case, a filter is provided
        /// as part of the run context.
        /// </summary>
        /// <param name="sources">Sources to be run.</param>
        /// <param name="runContext">Context to use when executing the tests.</param>
        /// <param name="frameworkHandle">Test log to send results and messages through</param>
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
#if LAUNCHDEBUGGER
            if (!Debugger.IsAttached)
                Debugger.Launch();
#endif
            Initialize(runContext, frameworkHandle);

            try
            {
                foreach (var source in sources)
                {
                    var assemblyName = source;
                    if (!Path.IsPathRooted(assemblyName))
                        assemblyName = Path.Combine(Environment.CurrentDirectory, assemblyName);

                    TestLog.Info("Running all tests in " + assemblyName);

                    RunAssembly(assemblyName, TestFilter.Empty);
                }
            }
            catch (Exception ex)
            {
                if (ex is TargetInvocationException)
                    ex = ex.InnerException;
                TestLog.Error("Exception thrown executing tests", ex);
            }
            finally
            {
                TestLog.Info(string.Format("NUnit Adapter {0}: Test execution complete", AdapterVersion));
                Unload();
            }

        }
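
Every example on this page implements the same ITestExecutor contract from Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter. For orientation, here is a minimal, hypothetical skeleton of such an executor; the class name and executor URI are placeholders, not code taken from any adapter shown here.

        using System.Collections.Generic;
        using Microsoft.VisualStudio.TestPlatform.ObjectModel;
        using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter;
        using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging;

        // Placeholder URI; every real adapter registers its own.
        [ExtensionUri("executor://example/minimal")]
        public class MinimalTestExecutor : ITestExecutor
        {
            private volatile bool cancelled;

            // Overload invoked with container paths when a whole source should run.
            public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
            {
                foreach (var source in sources)
                {
                    if (cancelled)
                        break;
                    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Would run all tests in " + source);
                }
            }

            // Overload invoked with individual TestCase objects when specific tests are selected.
            public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
            {
                foreach (var test in tests)
                {
                    if (cancelled)
                        break;
                    frameworkHandle.RecordStart(test);
                    frameworkHandle.RecordResult(new TestResult(test) { Outcome = TestOutcome.Skipped });
                    frameworkHandle.RecordEnd(test, TestOutcome.Skipped);
                }
            }

            public void Cancel()
            {
                cancelled = true;
            }
        }
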
Example #2
        /// <summary>
        /// Runs the tests.
        /// </summary>
        /// <param name="tests">Which tests should be run.</param>
        /// <param name="context">Context in which to run tests.</param>
        /// <param name="framework">Where results should be stored.</param>
        public void RunTests(IEnumerable<TestCase> tests, IRunContext context, IFrameworkHandle framework)
        {
            _state = ExecutorState.Running;

            foreach (var test in tests)
            {
                if (_state == ExecutorState.Cancelling)
                {
                    _state = ExecutorState.Cancelled;
                    return;
                }

                try
                {
                    var reportDocument = RunOrDebugCatchTest(test.Source, test.FullyQualifiedName, context, framework);
                    var result = GetTestResultFromReport(test, reportDocument, framework);
                    framework.RecordResult(result);
                }
                catch (Exception ex)
                {
                    // Log it and move on. It will show up to the user as a test that hasn't been run.
                    framework.SendMessage(TestMessageLevel.Error, "Exception occurred when processing test case: " + test.FullyQualifiedName);
                    framework.SendMessage(TestMessageLevel.Informational, "Message: " + ex.Message + "\nStacktrace:" + ex.StackTrace);
                }
            }
        }
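
The loop above cooperates with Cancel() through a shared state flag. The enum and the Cancel implementation are not part of the snippet; a minimal sketch of what they might look like, assuming nothing beyond the states referenced above:

        // Hypothetical completion of the state machine referenced in RunTests.
        private enum ExecutorState
        {
            NotRunning,
            Running,
            Cancelling,
            Cancelled
        }

        private volatile ExecutorState _state = ExecutorState.NotRunning;

        // Called by the test platform when the user stops the run; the loop in
        // RunTests observes the transition and exits before the next test case.
        public void Cancel()
        {
            _state = ExecutorState.Cancelling;
        }
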
Example #3
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            this.frameworkHandle = frameworkHandle;

            var testLogger = new TestLogger(frameworkHandle);

            testLogger.SendMainMessage("Execution started");

            foreach (var group in tests.GroupBy(t => t.Source))
            {
                testLogger.SendInformationalMessage(String.Format("Running selected: '{0}'", group.Key));

                try
                {
                    using (var sandbox = new Sandbox<Executor>(group.Key))
                    {
                        var assemblyDirectory = new DirectoryInfo(Path.GetDirectoryName(group.Key));
                        Directory.SetCurrentDirectory(assemblyDirectory.FullName);
                        sandbox.Content.Execute(this, group.Select(t => t.FullyQualifiedName).ToArray());
                    }
                }
                catch (Exception ex)
                {
                    testLogger.SendErrorMessage(ex, String.Format("Exception found while executing tests in group '{0}'", group.Key));

                    // just go on with the next
                }
            }

            testLogger.SendMainMessage("Execution finished");
        }
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            //Debugger.Launch();

            frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);

            Settings settings = GetSettings(runContext);

            foreach (string currentAssembly in sources.Distinct())
            {
                try
                {
                    if (!File.Exists(Path.Combine(Path.GetDirectoryName(Path.GetFullPath(currentAssembly)), "Machine.Specifications.dll")))
                    {
                        frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Machine.Specifications.dll not found for {0}", currentAssembly));
                        continue;
                    }

                    frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));

                    this.executor.RunAssembly(currentAssembly, settings, uri, frameworkHandle);
                }
                catch (Exception ex)
                {
                    frameworkHandle.SendMessage(TestMessageLevel.Error, String.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
                }
            }

            frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Complete on {0} assemblies ", sources.Count()));
            
        }
Example #5
 public void RunTests(IEnumerable<string> sources, IRunContext runContext,
     IFrameworkHandle frameworkHandle)
 {
     SetupExecutionPolicy();
     IEnumerable<TestCase> tests = PowerShellTestDiscoverer.GetTests(sources, null);
     RunTests(tests, runContext, frameworkHandle);
 }
Example #6
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            Guard.ArgumentNotNull("sources", sources);
            Guard.ArgumentNotNull("runContext", runContext);
            Guard.ArgumentNotNull("frameworkHandle", frameworkHandle);

            var cleanupList = new List<ExecutorWrapper>();

            try
            {
                RemotingUtility.CleanUpRegisteredChannels();

                cancelled = false;

                foreach (string source in sources)
                    if (VsTestRunner.IsXunitTestAssembly(source))
                        RunTestsInAssembly(cleanupList, source, frameworkHandle);
            }
            finally
            {
                Thread.Sleep(1000);

                foreach (var executorWrapper in cleanupList)
                    executorWrapper.Dispose();
            }
        }
Example #7
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            this.frameworkHandle = frameworkHandle;

            var testLogger = new TestLogger(frameworkHandle);

            testLogger.SendMainMessage("Execution started");

            foreach (var source in sources)
            {
                try
                {
                    using (var sandbox = new Sandbox<Executor>(source))
                    {
                        testLogger.SendInformationalMessage(String.Format("Running: '{0}'", source));

                        var assemblyDirectory = new DirectoryInfo(Path.GetDirectoryName(source));
                        Directory.SetCurrentDirectory(assemblyDirectory.FullName);
                        sandbox.Content.Execute(this);
                    }
                }
                catch (Exception ex)
                {
                    testLogger.SendErrorMessage(ex, String.Format("Exception found while executing tests in source '{0}'", source));

                    // just go on with the next
                }
            }

            testLogger.SendMainMessage("Execution finished");
        }
Example #8
        /// <summary>
        /// Called by the Visual Studio IDE to run all tests. Also called by TFS Build
        /// to run either all or selected tests. In the latter case, a filter is provided
        /// as part of the run context.
        /// </summary>
        /// <param name="sources">Sources to be run.</param>
        /// <param name="runContext">Context to use when executing the tests.</param>
        /// <param name="frameworkHandle">Test log to send results and messages through</param>
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            testLog.Initialize(frameworkHandle);
            Info("executing tests", "started");

            try
            {
                // Ensure any channels registered by other adapters are unregistered
                CleanUpRegisteredChannels();

                var tfsfilter = new TFSTestFilter(runContext);
                testLog.SendDebugMessage("Keepalive:" + runContext.KeepAlive);
                if (!tfsfilter.HasTfsFilterValue && runContext.KeepAlive)
                    frameworkHandle.EnableShutdownAfterTestRun = true;

                foreach (var source in sources)
                {
                    using (currentRunner = new AssemblyRunner(testLog, source, tfsfilter))
                    {
                        currentRunner.RunAssembly(frameworkHandle);
                    }

                    currentRunner = null;
                }
            }
            catch (Exception ex)
            {
                testLog.SendErrorMessage("Exception " + ex);
            }
            finally
            {
                Info("executing tests", "finished");
            }
        }
Example #9
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            IMessageLogger log = frameworkHandle;

            log.Version();

            HandlePoorVisualStudioImplementationDetails(runContext, frameworkHandle);

            foreach (var assemblyPath in sources)
            {
                try
                {
                    if (AssemblyDirectoryContainsFixie(assemblyPath))
                    {
                        log.Info("Processing " + assemblyPath);

                        var listener = new VisualStudioListener(frameworkHandle, assemblyPath);

                        using (var environment = new ExecutionEnvironment(assemblyPath))
                        {
                            environment.RunAssembly(new Options(), listener);
                        }
                    }
                    else
                    {
                        log.Info("Skipping " + assemblyPath + " because it is not a test assembly.");
                    }
                }
                catch (Exception exception)
                {
                    log.Error(exception);
                }
            }
        }
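
AssemblyDirectoryContainsFixie is not shown above. A plausible sketch, under the assumption that it mirrors the Machine.Specifications.dll lookup used in an earlier example on this page:

        // Hypothetical helper: treat an assembly as a test assembly only if the
        // test framework's own DLL is deployed next to it on disk.
        private static bool AssemblyDirectoryContainsFixie(string assemblyPath)
        {
            var directory = Path.GetDirectoryName(Path.GetFullPath(assemblyPath));
            return File.Exists(Path.Combine(directory, "Fixie.dll"));
        }
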
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);
            int executedSpecCount = 0;
            string currentAssembly = string.Empty;
            try
            {
                ISpecificationExecutor specificationExecutor = this.adapterFactory.CreateExecutor();
                IEnumerable<IGrouping<string, TestCase>> groupBySource = tests.GroupBy(x => x.Source);
                foreach (IGrouping<string, TestCase> grouping in groupBySource)
                {
                    currentAssembly = grouping.Key;
                    frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));
                    specificationExecutor.RunAssemblySpecifications(currentAssembly, MSpecTestAdapter.uri, runContext, frameworkHandle, grouping);
                    executedSpecCount += grouping.Count();
                }

                frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_COMPLETE, executedSpecCount, groupBySource.Count()));
            }
            catch (Exception ex)
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
            }
            finally
            {
            }
        }
Example #11
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            ChutzpahTracer.TraceInformation("Begin Test Adapter Run Tests");

            var settingsProvider = runContext.RunSettings.GetSettings(AdapterConstants.SettingsName) as ChutzpahAdapterSettingsProvider;
            var settings = settingsProvider != null ? settingsProvider.Settings : new ChutzpahAdapterSettings();

            ChutzpahTracingHelper.Toggle(settings.EnabledTracing);

            var testOptions = new TestOptions
                {
                    TestLaunchMode =
                        runContext.IsBeingDebugged ? TestLaunchMode.Custom :
                        settings.OpenInBrowser ? TestLaunchMode.FullBrowser :
                        TestLaunchMode.HeadlessBrowser,
                    CustomTestLauncher     = runContext.IsBeingDebugged ? new VsDebuggerTestLauncher() : null,
                    MaxDegreeOfParallelism = runContext.IsBeingDebugged ? 1 : settings.MaxDegreeOfParallelism,
                    ChutzpahSettingsFileEnvironments = new ChutzpahSettingsFileEnvironments(settings.ChutzpahSettingsFileEnvironments)
                };

            testOptions.CoverageOptions.Enabled = runContext.IsDataCollectionEnabled;

            var callback = new ParallelRunnerCallbackAdapter(new ExecutionCallback(frameworkHandle, runContext));
            testRunner.RunTests(sources, testOptions, callback);

            ChutzpahTracer.TraceInformation("End Test Adapter Run Tests");

        }
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            //Debugger.Launch();

            frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);

            int executedSpecCount = 0;

            Settings settings = GetSettings(runContext);

            string currentAssembly = string.Empty;
            try
            {

                foreach (IGrouping<string, TestCase> grouping in tests.GroupBy(x => x.Source)) {
                    currentAssembly = grouping.Key;
                    frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));

                    List<VisualStudioTestIdentifier> testsToRun = grouping.Select(test => test.ToVisualStudioTestIdentifier()).ToList();

                    this.executor.RunAssemblySpecifications(currentAssembly, testsToRun, settings, uri, frameworkHandle);
                    executedSpecCount += grouping.Count();
                }

                frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_COMPLETE, executedSpecCount, tests.GroupBy(x => x.Source).Count()));
            } catch (Exception ex)
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
            }
            finally
            {
            }
        }
        /// <summary>
        /// Runs the tests.
        /// </summary>
        /// <param name="tests">Tests to be run.</param>
        /// <param name="runContext">Context to use when executing the tests.</param>
        /// <param name="frameworkHandle">Handle to the framework to record results and to do framework operations.</param>
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            m_cancelled = false;
            try
            {
                foreach (TestCase test in tests)
                {
                    if (m_cancelled)
                    {
                        break;
                    }
                    frameworkHandle.RecordStart(test);
                    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Starting external test for " + test.DisplayName);
                    var testOutcome = RunExternalTest(test, runContext, frameworkHandle, test);
                    frameworkHandle.RecordResult(testOutcome);
                    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Test result:" + testOutcome.Outcome.ToString());
                }
            }
            catch (Exception e)
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Exception during test execution: " + e.Message);
            }
        }
Example #14
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            List<TestCase> tests = new List<TestCase>();

            TestDiscoverer.VisitTests(sources, t => tests.Add(t));

            InternalRunTests(tests, runContext, frameworkHandle, null);
        }
Example #15
 private void RunTests(string source, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     foreach (var result in ExternalTestExecutor.GetTestResults(source, null).Select(c => CreateTestResult(source, c)))
     {
         frameworkHandle.RecordStart(result.TestCase);
         frameworkHandle.RecordResult(result);
         frameworkHandle.RecordEnd(result.TestCase, result.Outcome);
     }
 }
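
CreateTestResult is not shown above, and the shape of the external runner's results is unknown; the sketch below assumes they expose a fully qualified name, a pass/fail flag, a duration and a message, and uses a placeholder executor URI.

 // Hypothetical mapping helper: build the TestResult that
 // IFrameworkHandle.RecordResult expects from externally reported values.
 private static TestResult CreateTestResult(string source, string fullyQualifiedName, bool passed, TimeSpan duration, string message)
 {
     // Placeholder URI; the real adapter supplies its own executor URI.
     var testCase = new TestCase(fullyQualifiedName, new Uri("executor://example/external"), source);

     return new TestResult(testCase)
     {
         Outcome = passed ? TestOutcome.Passed : TestOutcome.Failed,
         Duration = duration,
         ErrorMessage = passed ? null : message
     };
 }
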
        public virtual void Debug(BoostTestRunnerCommandLineArgs args, BoostTestRunnerSettings settings, IFrameworkHandle framework)
        {
            Utility.Code.Require(settings, "settings");

            using (Process process = Debug(framework, GetStartInfo(args, settings)))
            {
                MonitorProcess(process, settings.Timeout);
            }
        }
Example #17
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle) {
            ValidateArg.NotNull(tests, "tests");
            ValidateArg.NotNull(runContext, "runContext");
            ValidateArg.NotNull(frameworkHandle, "frameworkHandle");

            _cancelRequested.Reset();

            RunTestCases(tests, runContext, frameworkHandle);
        }
Example #18
 public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     RunTests(sources.Select(x => new TestCase(x, ExecutorDetails.Uri, x)
     {
         CodeFilePath = x,
         LineNumber = 2,
         DisplayName = "Abra cadabra"
     }), runContext, frameworkHandle);
 }
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            var validSources = from source in sources
                               where source.EndsWith(StringHelper.GetSearchExpression(), StringComparison.CurrentCultureIgnoreCase)
                               select source;

            foreach (var source in validSources) {
                RunTest(frameworkHandle, source);
            }
        }
 public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     //Debugger.Launch();
     // This is a temporary hack until I can figure out why running the specs per assembly directly using mspec does not work with a large number of specifications.
     // When they are run directly, the first 100 or so specs run fine and then an error occurs saying the run has taken more than 10 seconds and is being stopped.
     // For now we just rediscover the tests and run them that way, which makes no sense.
     TestCaseCollector collector = new TestCaseCollector();
     this.DiscoverTests(sources, runContext, frameworkHandle, collector);
     this.RunTests(collector.TestCases, runContext, frameworkHandle);
 }
Example #21
        private void RunTests(IRunContext runContext, IFrameworkHandle testExecutionRecorder)
        {
            if (runContext.InIsolation)
                launcher.TestProject.TestRunnerFactoryName = StandardTestRunnerFactoryNames.IsolatedAppDomain;

            var extension = new VSTestWindowExtension(testExecutionRecorder, testCaseFactory, testResultFactory);

            launcher.TestProject.AddTestRunnerExtension(extension);
            launcher.Run();
        }
Example #22
 public TestGeneratorAdapterTests()
 {
     testGeneratorDiscoverer = Substitute.For<ITestGeneratorDiscoverer>();
     discoveryContext = Substitute.For<IDiscoveryContext>();
     discoverySink = Substitute.For<ITestCaseDiscoverySink>();
     logger = Substitute.For<IMessageLogger>();
     frameworkHandle = Substitute.For<IFrameworkHandle>();
     runContext = Substitute.For<IRunContext>();
     testGenerator = Substitute.For<ITestGenerator>();
 }
        public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle testExecutionRecorder)
        {
            launcher = new TestLauncher();

            foreach (var source in sources)
            {
                launcher.AddFilePattern(source);
            }

            RunTests(runContext, testExecutionRecorder);
        }
        public void RunAssembly(string source, Settings settings, Uri executorUri, IFrameworkHandle frameworkHandle)
        {
            source = Path.GetFullPath(source);

            using (var scope = new IsolatedAppDomainExecutionScope<AppDomainExecutor>(source)) {
                VSProxyAssemblySpecificationRunListener listener = new VSProxyAssemblySpecificationRunListener(source, frameworkHandle, executorUri, settings);

                AppDomainExecutor executor = scope.CreateInstance();
                executor.RunAllTestsInAssembly(source, listener);
            }
        }
        public TestExecutorSinkAdapter(IFrameworkHandle frameworkHandle, IEnumerable<TestCase> tests)
        {
            _frameworkHandle = frameworkHandle;

            _tests = new Dictionary<string, TestCase>();

            foreach(var test in tests)
            {
                _tests[test.FullyQualifiedName] = test;
            }
        }
Example #26
        public VSTracker(IFrameworkHandle frameworkHandle, ITestSuite suite)
        {
            _frameworkHandle = frameworkHandle;

            _expectedTests = new Dictionary<string, TestCase>();
            foreach (var testCase in suite.TestCases)
            {
                foreach (var test in testCase.Tests)
                {
                    _expectedTests.Add(createDictionaryKey(test), test.ToVSTest());
                }
            }
        }
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle testExecutionRecorder)
        {
            launcher = new TestLauncher();

            foreach (var test in tests)
            {
                launcher.AddFilePattern(test.Source);
            }

            SetTestFilter(tests);

            RunTests(runContext, testExecutionRecorder);
        }
        public void RunAssembly(string source, Uri uri, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            source = Path.GetFullPath(source);
            if (!File.Exists(source))
            {
                throw new ArgumentException("Could not find file: " + source);
            }

            string assemblyFilename = source;
            string defaultConfigFile = SpecificationExecutor.GetDefaultConfigFile(source);
            runManager = new MSpecVSRunnerManager();
            runManager.RunAllTestsInAssembly(assemblyFilename, defaultConfigFile, frameworkHandle, uri);
        }
Example #29
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            this.frameworkHandle = frameworkHandle;

            foreach (var group in tests.GroupBy(t => t.Source))
            {
                frameworkHandle.SendMessage(TestMessageLevel.Informational, "Running selected: " + group.Key);

                using (var sandbox = new Sandbox<Executor>(group.Key))
                {
                    sandbox.Content.Execute(this, group.Select(t => t.FullyQualifiedName).ToArray());
                }
            }
        }
        public void RunAssemblySpecifications(string source, Uri uri, IRunContext runContext, IFrameworkHandle frameworkHandle, IEnumerable<TestCase> specifications)
        {
            source = Path.GetFullPath(source);
            if (!File.Exists(source))
            {
                throw new ArgumentException("Could not find file: " + source);
            }

            string assemblyFilename = source;
            string defaultConfigFile = SpecificationExecutor.GetDefaultConfigFile(source);
            IEnumerable<string> specsToRun = specifications.Select(x => x.FullyQualifiedName).ToList();
            runManager = new MSpecVSRunnerManager();
            runManager.RunTestsInAssembly(assemblyFilename, defaultConfigFile, frameworkHandle, specsToRun, uri);
        }
Example #31
 public void RunTests(IEnumerable <string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     RunTestsWithSourcesCallback?.Invoke(sources, runContext, frameworkHandle);
 }
Example #32
        public void RunTests(IEnumerable <Microsoft.VisualStudio.TestPlatform.ObjectModel.TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            // We'll just punt and run everything in each file that contains the selected tests
            var sources = tests.Select(test => test.Source).Distinct();

            RunTests(sources, runContext, frameworkHandle);
        }
Example #33
        /// <summary>
        /// Execute the parameter tests present in parameter source
        /// </summary>
        /// <param name="tests">Tests to execute.</param>
        /// <param name="runContext">The run context.</param>
        /// <param name="frameworkHandle">Handle to record test start/end/results.</param>
        /// <param name="source">The test container for the tests.</param>
        /// <param name="isDeploymentDone">Indicates if deployment is done.</param>
        private void ExecuteTestsInSource(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle, string source, bool isDeploymentDone)
        {
            Debug.Assert(!string.IsNullOrEmpty(source), "Source cannot be empty");

            source = isDeploymentDone
                         ? Path.Combine(
                               PlatformServiceProvider.Instance.TestDeployment.GetDeploymentDirectory(),
                               Path.GetFileName(source))
                         : source;

            using (var isolationHost = PlatformServiceProvider.Instance.CreateTestSourceHost(source, runContext?.RunSettings, frameworkHandle))
            {
                var testRunner = isolationHost.CreateInstanceForType(
                    typeof(UnitTestRunner),
                    new object[] { MSTestSettings.CurrentSettings }) as UnitTestRunner;
                PlatformServiceProvider.Instance.AdapterTraceLogger.LogInfo("Created unit-test runner {0}", source);

                this.ExecuteTestsWithTestRunner(tests, runContext, frameworkHandle, source, testRunner);

                PlatformServiceProvider.Instance.AdapterTraceLogger.LogInfo(
                    "Executed tests belonging to source {0}",
                    source);
            }
        }
Example #34
        private void RunTestCases(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            // May be null, but this is handled by RunTestCase if it matters.
            // No VS instance just means no debugging, but everything else is
            // okay.
            using (var app = VisualStudioApp.FromEnvironmentVariable(NodejsConstants.NodeToolsProcessIdEnvironmentVariable)) {
                // .njsproj file path -> project settings
                var sourceToSettings = new Dictionary <string, NodejsProjectSettings>();

                foreach (var test in tests)
                {
                    if (_cancelRequested.WaitOne(0))
                    {
                        break;
                    }

                    try {
                        RunTestCase(app, frameworkHandle, runContext, test, sourceToSettings);
                    } catch (Exception ex) {
                        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
                    }
                }
            }
        }
Example #35
 protected abstract void InvokeExecutor(LazyExtension <ITestExecutor, ITestExecutorCapabilities> executor, Tuple <Uri, string> executorUriExtensionTuple, RunContext runContext, IFrameworkHandle frameworkHandle);
Example #36
 public ExecutionCallback(IFrameworkHandle frameworkHandle, IRunContext runContext)
 {
     this.frameworkHandle = frameworkHandle;
     this.runContext      = runContext;
 }
Example #37
        void ITestExecutor.RunTests(IEnumerable <string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            Guard.ArgumentNotNull("sources", sources);

            var stopwatch = Stopwatch.StartNew();
            var logger    = new LoggerHelper(frameworkHandle, stopwatch);

            // In this case, we need to go through the files manually
            if (ContainsAppX(sources))
            {
#if PLATFORM_DOTNET
                var sourcePath = Windows.ApplicationModel.Package.Current.InstalledLocation.Path;
#else
                var sourcePath = Environment.CurrentDirectory;
#endif
                sources = Directory.GetFiles(sourcePath, "*.dll")
                          .Where(file => !platformAssemblies.Contains(Path.GetFileName(file)))
                          .ToList();
            }

            RunTests(runContext, frameworkHandle, logger, () => GetTests(sources, logger, runContext));
        }
Example #38
        void ITestExecutor.RunTests(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            Guard.ArgumentNotNull("tests", tests);
            Guard.ArgumentValid("tests", "AppX not supported in this overload", !ContainsAppX(tests.Select(t => t.Source)));

            var stopwatch = Stopwatch.StartNew();
            var logger    = new LoggerHelper(frameworkHandle, stopwatch);

            RunTests(
                runContext, frameworkHandle, logger,
                () => tests.GroupBy(testCase => testCase.Source)
                           .Select(group => new AssemblyRunInfo
                           {
                               AssemblyFileName = group.Key,
                               Configuration = LoadConfiguration(group.Key),
                               TestCases = group.ToList()
                           })
                           .ToList());
        }
Example #39
        private void RunTestCase(
            IFrameworkHandle frameworkHandle,
            IRunContext runContext,
            TestCase test,
            Dictionary <string, PythonProjectSettings> sourceToSettings
            )
        {
            var testResult = new TestResult(test);

            frameworkHandle.RecordStart(test);
            testResult.StartTime = DateTimeOffset.Now;

            PythonProjectSettings settings;

            if (!sourceToSettings.TryGetValue(test.Source, out settings))
            {
                sourceToSettings[test.Source] = settings = LoadProjectSettings(test.Source, _interpreterService);
            }
            if (settings == null)
            {
                frameworkHandle.SendMessage(
                    TestMessageLevel.Error,
                    "Unable to determine interpreter to use for " + test.Source);
                RecordEnd(
                    frameworkHandle,
                    test,
                    testResult,
                    null,
                    "Unable to determine interpreter to use for " + test.Source,
                    TestOutcome.Failed);
                return;
            }

            var debugMode = PythonDebugMode.None;

            if (runContext.IsBeingDebugged && _app != null)
            {
                debugMode = settings.EnableNativeCodeDebugging ? PythonDebugMode.PythonAndNative : PythonDebugMode.PythonOnly;
            }

            var testCase = new PythonTestCase(settings, test, debugMode);

            var dte = _app != null ? _app.GetDTE() : null;

            if (dte != null && debugMode != PythonDebugMode.None)
            {
                dte.Debugger.DetachAll();
            }

            if (!File.Exists(settings.Factory.Configuration.InterpreterPath))
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Interpreter path does not exist: " + settings.Factory.Configuration.InterpreterPath);
                return;
            }

            var env           = new Dictionary <string, string>();
            var pythonPathVar = settings.Factory.Configuration.PathEnvironmentVariable;
            var pythonPath    = testCase.SearchPaths;

            if (!string.IsNullOrWhiteSpace(pythonPathVar))
            {
                if (_app != null)
                {
                    var settingsManager = SettingsManagerCreator.GetSettingsManager(dte);
                    if (settingsManager != null)
                    {
                        var store = settingsManager.GetReadOnlySettingsStore(SettingsScope.UserSettings);
                        if (store != null && store.CollectionExists(@"PythonTools\Options\General"))
                        {
                            var  settingStr = store.GetString(@"PythonTools\Options\General", "ClearGlobalPythonPath", "True");
                            bool settingBool;
                            if (bool.TryParse(settingStr, out settingBool) && !settingBool)
                            {
                                pythonPath += ";" + Environment.GetEnvironmentVariable(pythonPathVar);
                            }
                        }
                    }
                }
                env[pythonPathVar] = pythonPath;
            }

            foreach (var envVar in testCase.Environment)
            {
                env[envVar.Key] = envVar.Value;
            }

            using (var proc = ProcessOutput.Run(
                       !settings.IsWindowsApplication ?
                       settings.Factory.Configuration.InterpreterPath :
                       settings.Factory.Configuration.WindowsInterpreterPath,
                       testCase.Arguments,
                       testCase.WorkingDirectory,
                       env,
                       false,
                       null
                       )) {
                bool killed = false;

#if DEBUG
                frameworkHandle.SendMessage(TestMessageLevel.Informational, "cd " + testCase.WorkingDirectory);
                frameworkHandle.SendMessage(TestMessageLevel.Informational, "set " + (pythonPathVar ?? "") + "=" + (pythonPath ?? ""));
                frameworkHandle.SendMessage(TestMessageLevel.Informational, proc.Arguments);
#endif

                proc.Wait(TimeSpan.FromMilliseconds(500));
                if (debugMode != PythonDebugMode.None)
                {
                    if (proc.ExitCode.HasValue)
                    {
                        // Process has already exited
                        frameworkHandle.SendMessage(TestMessageLevel.Error, "Failed to attach debugger because the process has already exited.");
                        if (proc.StandardErrorLines.Any())
                        {
                            frameworkHandle.SendMessage(TestMessageLevel.Error, "Standard error from Python:");
                            foreach (var line in proc.StandardErrorLines)
                            {
                                frameworkHandle.SendMessage(TestMessageLevel.Error, line);
                            }
                        }
                    }

                    try {
                        if (debugMode == PythonDebugMode.PythonOnly)
                        {
                            string qualifierUri = string.Format("tcp://{0}@localhost:{1}", testCase.DebugSecret, testCase.DebugPort);
                            while (!_app.AttachToProcess(proc, PythonRemoteDebugPortSupplierUnsecuredId, qualifierUri))
                            {
                                if (proc.Wait(TimeSpan.FromMilliseconds(500)))
                                {
                                    break;
                                }
                            }
                        }
                        else
                        {
                            var engines = new[] { PythonDebugEngineGuid, VSConstants.DebugEnginesGuids.NativeOnly_guid };
                            while (!_app.AttachToProcess(proc, engines))
                            {
                                if (proc.Wait(TimeSpan.FromMilliseconds(500)))
                                {
                                    break;
                                }
                            }
                        }

#if DEBUG
                    } catch (COMException ex) {
                        frameworkHandle.SendMessage(TestMessageLevel.Error, "Error occurred connecting to debuggee.");
                        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
                        try {
                            proc.Kill();
                        } catch (InvalidOperationException) {
                            // Process has already exited
                        }
                        killed = true;
                    }
#else
                    } catch (COMException) {
Example #40
        public void RunTests(IEnumerable <VsTestCase> vsTestCasesToRun, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            try
            {
                TryRunTests(vsTestCasesToRun, runContext, frameworkHandle);
            }
            catch (Exception e)
            {
                _logger.LogError("Exception while running tests: " + e);
            }

            CommonFunctions.ReportErrors(_logger, "test execution", _settings.DebugMode);
        }
Example #41
        public void RunTests(IEnumerable <string> executables, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            try
            {
                TryRunTests(executables, runContext, frameworkHandle);
            }
            catch (Exception e)
            {
                _logger.LogError($"Exception while running tests: {e}");
            }

            CommonFunctions.ReportErrors(_logger, "test execution", _settings.DebugMode);
        }
Example #42
        private void DoRunTests(ICollection <TestCase> testCasesToRun, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            if (testCasesToRun.Count == 0)
            {
                return;
            }

            bool isRunningInsideVisualStudio = !string.IsNullOrEmpty(runContext.SolutionDirectory);
            var  reporter = new VsTestFrameworkReporter(frameworkHandle, isRunningInsideVisualStudio, _logger);

            var debuggerAttacher       = _debuggerAttacher ?? new MessageBasedDebuggerAttacher(_settings.DebuggingNamedPipeId, _logger);
            var processExecutorFactory = new DebuggedProcessExecutorFactory(frameworkHandle, debuggerAttacher);

            lock (_lock)
            {
                if (_canceled)
                {
                    return;
                }

                _executor = new GoogleTestExecutor(_logger, _settings, processExecutorFactory);
            }
            _executor.RunTests(testCasesToRun, reporter, runContext.IsBeingDebugged);
            reporter.AllTestsFinished();
        }
Example #43
 public void RunTests(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     throw new NotImplementedException();
 }
Example #44
        static void RunTests(IMessageLogger log, IFrameworkHandle frameworkHandle, string assemblyPath, Action <NamedPipeServerStream> sendCommand)
        {
            if (!IsTestAssembly(assemblyPath))
            {
                log.Info("Skipping " + assemblyPath + " because it is not a test assembly.");
                return;
            }

            log.Info("Processing " + assemblyPath);

            var pipeName = Guid.NewGuid().ToString();

            Environment.SetEnvironmentVariable("FIXIE_NAMED_PIPE", pipeName);

            using (var pipe = new NamedPipeServerStream(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Message))
                using (var process = Start(assemblyPath, frameworkHandle))
                {
                    pipe.WaitForConnection();

                    sendCommand(pipe);

                    var recorder = new ExecutionRecorder(frameworkHandle, assemblyPath);

                    PipeMessage.CaseStarted lastCaseStarted = null;

                    while (true)
                    {
                        var messageType = pipe.ReceiveMessage();

                        if (messageType == typeof(PipeMessage.CaseStarted).FullName)
                        {
                            var message = pipe.Receive <PipeMessage.CaseStarted>();
                            lastCaseStarted = message;
                            recorder.Record(message);
                        }
                        else if (messageType == typeof(PipeMessage.CaseSkipped).FullName)
                        {
                            var testResult = pipe.Receive <PipeMessage.CaseSkipped>();
                            recorder.Record(testResult);
                        }
                        else if (messageType == typeof(PipeMessage.CasePassed).FullName)
                        {
                            var testResult = pipe.Receive <PipeMessage.CasePassed>();
                            recorder.Record(testResult);
                        }
                        else if (messageType == typeof(PipeMessage.CaseFailed).FullName)
                        {
                            var testResult = pipe.Receive <PipeMessage.CaseFailed>();
                            recorder.Record(testResult);
                        }
                        else if (messageType == typeof(PipeMessage.Exception).FullName)
                        {
                            var exception = pipe.Receive <PipeMessage.Exception>();
                            throw new RunnerException(exception);
                        }
                        else if (messageType == typeof(PipeMessage.Completed).FullName)
                        {
                            var completed = pipe.Receive <PipeMessage.Completed>();
                            break;
                        }
                        else if (!string.IsNullOrEmpty(messageType))
                        {
                            var body = pipe.ReceiveMessage();
                            log.Error($"The test runner received an unexpected message of type {messageType}: {body}");
                        }
                        else
                        {
                            var exception = new TestProcessExitException(process.TryGetExitCode());

                            if (lastCaseStarted != null)
                            {
                                recorder.Record(new PipeMessage.CaseFailed
                                {
                                    Test      = lastCaseStarted.Test,
                                    Name      = lastCaseStarted.Name,
                                    Exception = new PipeMessage.Exception(exception)
                                });
                            }

                            throw exception;
                        }
                    }
                }
        }
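
The Start helper above is not shown. A hedged sketch, assuming the test assembly builds as an executable, a debug flag, and the usual System.Diagnostics/System.IO usings: when debugging is requested it goes through IFrameworkHandle.LaunchProcessWithDebuggerAttached so the IDE can attach to the child test process, and no environment dictionary is passed because the example above already sets FIXIE_NAMED_PIPE on the parent environment.

        // Hypothetical launch helper; the real Start method's signature may differ.
        static Process Start(string assemblyPath, IFrameworkHandle frameworkHandle, bool debug = false)
        {
            var workingDirectory = Path.GetDirectoryName(Path.GetFullPath(assemblyPath));

            if (debug)
            {
                // Ask the IDE to launch the process with its debugger already attached.
                // Arguments and environment overrides are omitted here.
                int processId = frameworkHandle.LaunchProcessWithDebuggerAttached(
                    assemblyPath, workingDirectory, null, null);
                return Process.GetProcessById(processId);
            }

            return Process.Start(new ProcessStartInfo
            {
                FileName = assemblyPath,
                WorkingDirectory = workingDirectory,
                UseShellExecute = false
            });
        }
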
Example #45
 protected abstract IEnumerable <Tuple <Uri, string> > GetExecutorUriExtensionMap(IFrameworkHandle testExecutorFrameworkHandle, RunContext runContext);
Example #46
        protected override IEnumerable <Tuple <Uri, string> > GetExecutorUriExtensionMap(IFrameworkHandle testExecutorFrameworkHandle, RunContext runContext)
        {
            this.executorUriVsTestList = this.GetExecutorVsTestCaseList(this.testCases);

            Debug.Assert(this.TestExecutionContext.TestCaseFilter == null, "TestCaseFilter should be null for specific tests.");
            runContext.FilterExpressionWrapper = null;

            return(this.executorUriVsTestList.Keys);
        }
Example #47
        private void RunTestCase(VisualStudioApp app, IFrameworkHandle frameworkHandle, IRunContext runContext, TestCase test, Dictionary <string, NodejsProjectSettings> sourceToSettings)
        {
            var testResult = new TestResult(test);

            frameworkHandle.RecordStart(test);
            testResult.StartTime = DateTimeOffset.Now;
            NodejsProjectSettings settings;

            if (!sourceToSettings.TryGetValue(test.Source, out settings))
            {
                sourceToSettings[test.Source] = settings = LoadProjectSettings(test.Source);
            }
            if (settings == null)
            {
                frameworkHandle.SendMessage(
                    TestMessageLevel.Error,
                    "Unable to determine interpreter to use for " + test.Source);
                RecordEnd(
                    frameworkHandle,
                    test,
                    testResult,
                    null,
                    "Unable to determine interpreter to use for " + test.Source,
                    TestOutcome.Failed);
                return;
            }

            NodejsTestInfo testInfo = new NodejsTestInfo(test.FullyQualifiedName);
            List <string>  args     = new List <string>();
            int            port     = 0;

            if (runContext.IsBeingDebugged && app != null)
            {
                app.GetDTE().Debugger.DetachAll();
                args.AddRange(GetDebugArgs(settings, out port));
            }

            var workingDir = Path.GetDirectoryName(CommonUtils.GetAbsoluteFilePath(settings.WorkingDir, testInfo.ModulePath));

            args.AddRange(GetInterpreterArgs(test, workingDir, settings.ProjectRootDir));

            //Debug.Fail("attach debugger");
            if (!File.Exists(settings.NodeExePath))
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Interpreter path does not exist: " + settings.NodeExePath);
                return;
            }
            lock (_syncObject) {
                _nodeProcess = ProcessOutput.Run(
                    settings.NodeExePath,
                    args,
                    workingDir,
                    null,
                    false,
                    null,
                    false);

#if DEBUG
                frameworkHandle.SendMessage(TestMessageLevel.Informational, "cd " + workingDir);
                frameworkHandle.SendMessage(TestMessageLevel.Informational, _nodeProcess.Arguments);
#endif

                _nodeProcess.Wait(TimeSpan.FromMilliseconds(500));
                if (runContext.IsBeingDebugged && app != null)
                {
                    try {
                        //the '#ping=0' is a special flag to tell VS node debugger not to connect to the port,
                        //because a connection carries the consequence of setting off --debug-brk, and breakpoints will be missed.
                        string qualifierUri = string.Format("tcp://localhost:{0}#ping=0", port);
                        while (!app.AttachToProcess(_nodeProcess, NodejsRemoteDebugPortSupplierUnsecuredId, qualifierUri))
                        {
                            if (_nodeProcess.Wait(TimeSpan.FromMilliseconds(500)))
                            {
                                break;
                            }
                        }
#if DEBUG
                    } catch (COMException ex) {
                        frameworkHandle.SendMessage(TestMessageLevel.Error, "Error occurred connecting to debuggee.");
                        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
                        KillNodeProcess();
                    }
#else
                    } catch (COMException) {
Example #48
 protected override void InvokeExecutor(LazyExtension <ITestExecutor, ITestExecutorCapabilities> executor, Tuple <Uri, string> executorUri, RunContext runContext, IFrameworkHandle frameworkHandle)
 {
     executor?.Value.RunTests(this.executorUriVsTestList[executorUri], runContext, frameworkHandle);
 }
Example #49
        /// <summary>
        /// Execute the parameter tests
        /// </summary>
        /// <param name="tests">Tests to execute.</param>
        /// <param name="runContext">The run context.</param>
        /// <param name="frameworkHandle">Handle to record test start/end/results.</param>
        /// <param name="isDeploymentDone">Indicates if deployment is done.</param>
        internal virtual void ExecuteTests(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle, bool isDeploymentDone)
        {
            var testsBySource = from test in tests
                                group test by test.Source into testGroup
                                select new { Source = testGroup.Key, Tests = testGroup };

            foreach (var group in testsBySource)
            {
                this.ExecuteTestsInSource(group.Tests, runContext, frameworkHandle, group.Source, isDeploymentDone);
            }
        }
Example #50
 public void RunTests(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     RunTestsWithTestsCallback?.Invoke(tests, runContext, frameworkHandle);
 }
 public DebuggedProcessLauncher(IFrameworkHandle handle)
 {
     _frameworkHandle = handle;
 }
Example #52
 public IEnumerable <Tuple <Uri, string> > CallGetExecutorUriExtensionMap(
     IFrameworkHandle testExecutorFrameworkHandle, RunContext runContext)
 {
     return(this.GetExecutorUriExtensionMap(testExecutorFrameworkHandle, runContext));
 }
Example #53
 public void RunTests(IEnumerable <string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     throw new NotImplementedException();
 }
Example #54
        private void RunTestCases(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle, NodejsProjectSettings settings)
        {
            // May be null, but this is handled by RunTestCase if it matters.
            // No VS instance just means no debugging, but everything else is
            // okay.
            if (tests.Count() == 0)
            {
                return;
            }

            using (var app = VisualStudioApp.FromEnvironmentVariable(NodejsConstants.NodeToolsProcessIdEnvironmentVariable))
            {
                var port     = 0;
                var nodeArgs = new List <string>();
                // .njsproj file path -> project settings
                var sourceToSettings = new Dictionary <string, NodejsProjectSettings>();
                var testObjects      = new List <TestCaseObject>();

                if (!File.Exists(settings.NodeExePath))
                {
                    frameworkHandle.SendMessage(TestMessageLevel.Error, "Interpreter path does not exist: " + settings.NodeExePath);
                    return;
                }

                // All tests being run are for the same test file, so just use the first test listed to get the working dir
                var testInfo   = new NodejsTestInfo(tests.First().FullyQualifiedName);
                var workingDir = Path.GetDirectoryName(CommonUtils.GetAbsoluteFilePath(settings.WorkingDir, testInfo.ModulePath));

                var nodeVersion = Nodejs.GetNodeVersion(settings.NodeExePath);

                // We can only log telemetry when we're running in VS.
                // Since the required assemblies are not on disk if we're not running in VS, we have to reference them in a separate method
                // this way the .NET framework only tries to load the assemblies when we actually need them.
                if (app != null)
                {
                    LogTelemetry(tests.Count(), nodeVersion, runContext.IsBeingDebugged);
                }

                foreach (var test in tests)
                {
                    if (_cancelRequested.WaitOne(0))
                    {
                        break;
                    }

                    if (settings == null)
                    {
                        frameworkHandle.SendMessage(
                            TestMessageLevel.Error,
                            $"Unable to determine interpreter to use for {test.Source}.");
                        frameworkHandle.RecordEnd(test, TestOutcome.Failed);
                    }

                    var args = new List <string>();
                    args.AddRange(GetInterpreterArgs(test, workingDir, settings.ProjectRootDir));

                    // Fetch the run_tests argument for starting node.exe if not specified yet
                    if (nodeArgs.Count == 0 && args.Count > 0)
                    {
                        nodeArgs.Add(args[0]);
                    }

                    testObjects.Add(new TestCaseObject(args[1], args[2], args[3], args[4], args[5]));
                }

                if (runContext.IsBeingDebugged && app != null)
                {
                    app.GetDTE().Debugger.DetachAll();
                    // Ensure that --debug-brk is the first argument
                    nodeArgs.InsertRange(0, GetDebugArgs(out port));
                }

                _nodeProcess = ProcessOutput.Run(
                    settings.NodeExePath,
                    nodeArgs,
                    settings.WorkingDir,
                    /* env */ null,
                    /* visible */ false,
                    /* redirector */ new TestExecutionRedirector(this.ProcessTestRunnerEmit),
                    /* quote args */ false);

                if (runContext.IsBeingDebugged && app != null)
                {
                    try
                    {
                        //the '#ping=0' is a special flag to tell VS node debugger not to connect to the port,
                        //because a connection carries the consequence of setting off --debug-brk, and breakpoints will be missed.
                        var qualifierUri = string.Format("tcp://localhost:{0}#ping=0", port);
                        while (!app.AttachToProcess(_nodeProcess, NodejsRemoteDebugPortSupplierUnsecuredId, qualifierUri))
                        {
                            if (_nodeProcess.Wait(TimeSpan.FromMilliseconds(500)))
                            {
                                break;
                            }
                        }
#if DEBUG
                    }
                    catch (COMException ex)
                    {
                        frameworkHandle.SendMessage(TestMessageLevel.Error, "Error occurred connecting to debuggee.");
                        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
                        KillNodeProcess();
                    }
#else
                    } catch (COMException) {
Example #55
 public void CallInvokeExecutor(LazyExtension <ITestExecutor, ITestExecutorCapabilities> executor,
                                Tuple <Uri, string> executorUriExtensionTuple, RunContext runContext, IFrameworkHandle frameworkHandle)
 {
     this.InvokeExecutor(executor, executorUriExtensionTuple, runContext, frameworkHandle);
 }
Ejemplo n.º 56
0
 public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
 {
     frameworkHandle.SendMessage(TestMessageLevel.Informational, "Running sources directly");
 }
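
// Example 56 above only logs a message. For contrast, a minimal sketch, assuming only the
// standard TestPlatform object model, of the reporting calls a RunTests implementation
// usually makes for each test it executes. The executor URI and the ReportOne helper are
// illustrative placeholders, not part of any adapter shown here.
using System;
using Microsoft.VisualStudio.TestPlatform.ObjectModel;
using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter;

public static class ResultReporting
{
    private static readonly Uri ExecutorUri = new Uri("executor://example/sources");

    public static void ReportOne(IFrameworkHandle frameworkHandle, string source, string fullyQualifiedName, bool passed)
    {
        // TestCase(fullyQualifiedName, executorUri, source) is the standard constructor.
        var testCase = new TestCase(fullyQualifiedName, ExecutorUri, source);

        frameworkHandle.RecordStart(testCase);

        var result = new TestResult(testCase)
        {
            Outcome = passed ? TestOutcome.Passed : TestOutcome.Failed,
        };
        frameworkHandle.RecordResult(result);

        // RecordEnd closes the bracket opened by RecordStart.
        frameworkHandle.RecordEnd(testCase, result.Outcome);
    }
}
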
Ejemplo n.º 57
0
        /// <summary>
        /// Execute the parameter tests present in parameter source
        /// </summary>
        /// <param name="tests">Tests to execute.</param>
        /// <param name="runContext">The run context.</param>
        /// <param name="frameworkHandle">Handle to record test start/end/results.</param>
        /// <param name="source">The test container for the tests.</param>
        /// <param name="isDeploymentDone">Indicates if deployment is done.</param>
        private void ExecuteTestsInSource(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle, string source, bool isDeploymentDone)
        {
            Debug.Assert(!string.IsNullOrEmpty(source), "Source cannot be empty");

            if (isDeploymentDone)
            {
                source = Path.Combine(PlatformServiceProvider.Instance.TestDeployment.GetDeploymentDirectory(), Path.GetFileName(source));
            }

            using (var isolationHost = PlatformServiceProvider.Instance.CreateTestSourceHost(source, runContext?.RunSettings, frameworkHandle))
            {
                // Create an instance of a type defined in the adapter so that the adapter gets loaded in the child app domain.
                var testRunner = isolationHost.CreateInstanceForType(
                    typeof(UnitTestRunner),
                    new object[] { MSTestSettings.CurrentSettings }) as UnitTestRunner;

                // After loading the adapter, reset the child domain's appbase to point to the test source location.
                isolationHost.UpdateAppBaseToTestSourceLocation();

                PlatformServiceProvider.Instance.AdapterTraceLogger.LogInfo("Created unit-test runner {0}", source);

                // Default test set is filtered tests based on user provided filter criteria
                IEnumerable<TestCase> testsToRun = Enumerable.Empty<TestCase>();
                var filterExpression = this.TestMethodFilter.GetFilterExpression(runContext, frameworkHandle, out var filterHasError);
                if (filterHasError)
                {
                    // Bail out without processing everything else below.
                    return;
                }

                testsToRun = tests.Where(t => MatchTestFilter(filterExpression, t, this.TestMethodFilter));

                // This is done so that the appropriate TestContext property values are set at the source level
                // and merged with the session-level parameters.
                var sourceLevelParameters = PlatformServiceProvider.Instance.SettingsProvider.GetProperties(source);

                if (this.sessionParameters != null && this.sessionParameters.Count > 0)
                {
                    sourceLevelParameters = sourceLevelParameters.Concat(this.sessionParameters).ToDictionary(x => x.Key, x => x.Value);
                }

                var sourceSettingsProvider = isolationHost.CreateInstanceForType(
                    typeof(TestAssemblySettingsProvider),
                    null) as TestAssemblySettingsProvider;

                var sourceSettings  = sourceSettingsProvider.GetSettings(source);
                var parallelWorkers = sourceSettings.Workers;
                var parallelScope   = sourceSettings.Scope;

                if (MSTestSettings.CurrentSettings.ParallelizationWorkers.HasValue)
                {
                    // The runsettings value takes precedence over an assembly level setting. Reset the level.
                    parallelWorkers = MSTestSettings.CurrentSettings.ParallelizationWorkers.Value;
                }

                if (MSTestSettings.CurrentSettings.ParallelizationScope.HasValue)
                {
                    // The runsettings value takes precedence over an assembly level setting. Reset the level.
                    parallelScope = MSTestSettings.CurrentSettings.ParallelizationScope.Value;
                }

                if (!MSTestSettings.CurrentSettings.DisableParallelization && sourceSettings.CanParallelizeAssembly && parallelWorkers > 0)
                {
                    // Parallelization is enabled. Let's do further classification for sets.
                    var logger = (IMessageLogger)frameworkHandle;
                    logger.SendMessage(
                        TestMessageLevel.Informational,
                        string.Format(CultureInfo.CurrentCulture, Resource.TestParallelizationBanner, source, parallelWorkers, parallelScope));

                    // Create test sets for execution; we can execute them in parallel based on the parallel settings.
                    IEnumerable<IGrouping<bool, TestCase>> testsets = Enumerable.Empty<IGrouping<bool, TestCase>>();

                    // Split into parallelizable and non-parallelizable sets.
                    testsets = testsToRun.GroupBy(t => t.GetPropertyValue<bool>(TestAdapter.Constants.DoNotParallelizeProperty, false));

                    var parallelizableTestSet = testsets.FirstOrDefault(g => g.Key == false);
                    var nonparallelizableTestSet = testsets.FirstOrDefault(g => g.Key == true);

                    if (parallelizableTestSet != null)
                    {
                        ConcurrentQueue<IEnumerable<TestCase>> queue = null;

                        // Chunk the sets into further groups based on parallel level
                        switch (parallelScope)
                        {
                        case ExecutionScope.MethodLevel:
                            queue = new ConcurrentQueue<IEnumerable<TestCase>>(parallelizableTestSet.Select(t => new[] { t }));
                            break;

                        case ExecutionScope.ClassLevel:
                            queue = new ConcurrentQueue<IEnumerable<TestCase>>(parallelizableTestSet.GroupBy(t => t.GetPropertyValue(TestAdapter.Constants.TestClassNameProperty) as string));
                            break;
                        }

                        var tasks = new List<Task>();

                        for (int i = 0; i < parallelWorkers; i++)
                        {
                            tasks.Add(Task.Factory.StartNew(
                                          () =>
                            {
                                while (!queue.IsEmpty)
                                {
                                    if (this.cancellationToken != null && this.cancellationToken.Canceled)
                                    {
                                        // if a cancellation has been requested, do not queue any more test runs.
                                        break;
                                    }

                                    if (queue.TryDequeue(out IEnumerable<TestCase> testSet))
                                    {
                                        this.ExecuteTestsWithTestRunner(testSet, runContext, frameworkHandle, source, sourceLevelParameters, testRunner);
                                    }
                                }
                            },
                                          CancellationToken.None,
                                          TaskCreationOptions.LongRunning,
                                          TaskScheduler.Default));
                        }

                        Task.WaitAll(tasks.ToArray());
                    }

                    // Run the non-parallelizable set once the parallel sets have completed.
                    if (nonparallelizableTestSet != null)
                    {
                        this.ExecuteTestsWithTestRunner(nonparallelizableTestSet, runContext, frameworkHandle, source, sourceLevelParameters, testRunner);
                    }
                }
                else
                {
                    this.ExecuteTestsWithTestRunner(testsToRun, runContext, frameworkHandle, source, sourceLevelParameters, testRunner);
                }

                this.RunCleanup(frameworkHandle, testRunner);

                PlatformServiceProvider.Instance.AdapterTraceLogger.LogInfo(
                    "Executed tests belonging to source {0}",
                    source);
            }
        }
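
// A standalone sketch of the worker-pool pattern ExecuteTestsInSource uses above: put the
// parallelizable batches into a ConcurrentQueue and drain it from a fixed number of
// long-running tasks. The generic Run helper and its runBatch delegate stand in for
// ExecuteTestsWithTestRunner and are assumptions of this sketch.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

public static class ParallelRunner
{
    public static void Run<T>(IEnumerable<IEnumerable<T>> batches, int workers, Action<IEnumerable<T>> runBatch)
    {
        var queue = new ConcurrentQueue<IEnumerable<T>>(batches);
        var tasks = new List<Task>();

        for (int i = 0; i < workers; i++)
        {
            tasks.Add(Task.Factory.StartNew(
                () =>
                {
                    // Each worker keeps dequeuing batches until the queue is drained.
                    while (queue.TryDequeue(out var batch))
                    {
                        runBatch(batch);
                    }
                },
                CancellationToken.None,
                TaskCreationOptions.LongRunning,
                TaskScheduler.Default));
        }

        Task.WaitAll(tasks.ToArray());
    }
}
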
Ejemplo n.º 58
0
        private void TryRunTests(IEnumerable<VsTestCase> vsTestCasesToRun, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            var stopwatch = StartStopWatchAndInitEnvironment(runContext, frameworkHandle);

            if (!AbleToRun(runContext))
            {
                return;
            }

            var vsTestCasesToRunAsArray = vsTestCasesToRun as VsTestCase[] ?? vsTestCasesToRun.ToArray();
            ISet<string> allTraitNames = GetAllTraitNames(vsTestCasesToRunAsArray.Select(tc => tc.ToTestCase()));
            var filter = new TestCaseFilter(runContext, allTraitNames, _logger);

            vsTestCasesToRun = filter.Filter(vsTestCasesToRunAsArray);

            ICollection <TestCase> testCasesToRun = vsTestCasesToRun.Select(tc => tc.ToTestCase()).ToArray();

            DoRunTests(testCasesToRun, runContext, frameworkHandle);

            stopwatch.Stop();
            _logger.LogInfo($"Google Test execution completed, overall duration: {stopwatch.Elapsed}.");
        }
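
// A hedged sketch of applying the vstest --TestCaseFilter expression through the
// ITestCaseFilterExpression API exposed by IRunContext; the TestCaseFilter class in the
// example above is the adapter's own wrapper around the same idea. The supported-property
// list and the null-returning property provider are simplifications for illustration.
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestPlatform.ObjectModel;
using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter;

public static class FilterHelper
{
    private static readonly string[] SupportedProperties = { "FullyQualifiedName", "DisplayName" };

    public static IEnumerable<TestCase> ApplyFilter(IRunContext runContext, IEnumerable<TestCase> testCases)
    {
        // GetTestCaseFilter returns null when no filter was supplied on the command line;
        // it can throw when the filter string is malformed, so real adapters wrap this call.
        ITestCaseFilterExpression filterExpression =
            runContext.GetTestCaseFilter(SupportedProperties, propertyName => null);

        if (filterExpression == null)
        {
            return testCases;
        }

        return testCases.Where(tc => filterExpression.MatchTestCase(tc, propertyName =>
            propertyName == "DisplayName" ? tc.DisplayName : tc.FullyQualifiedName));
    }
}
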
Ejemplo n.º 59
0
        private void TryRunTests(IEnumerable<string> executables, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            var stopwatch = StartStopWatchAndInitEnvironment(runContext, frameworkHandle);

            if (!AbleToRun(runContext))
            {
                return;
            }

            IList<TestCase> allTestCasesInExecutables = GetAllTestCasesInExecutables(executables).ToList();

            ISet<string> allTraitNames = GetAllTraitNames(allTestCasesInExecutables);
            var filter = new TestCaseFilter(runContext, allTraitNames, _logger);
            List<VsTestCase> vsTestCasesToRun =
                filter.Filter(allTestCasesInExecutables.Select(tc => tc.ToVsTestCase())).ToList();
            ICollection<TestCase> testCasesToRun =
                allTestCasesInExecutables.Where(
                    tc => vsTestCasesToRun.Any(vtc => tc.FullyQualifiedName == vtc.FullyQualifiedName)).ToArray();

            DoRunTests(testCasesToRun, runContext, frameworkHandle);

            stopwatch.Stop();
            _logger.LogInfo($"Google Test execution completed, overall duration: {stopwatch.Elapsed}.");
        }
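
// The Where(... Any(...)) match above compares every discovered test case against every
// selected one. A hedged sketch of the same selection keyed on FullyQualifiedName with a
// HashSet, which keeps the lookup roughly linear; the helper and its names are assumptions
// of this sketch rather than adapter code.
using System;
using System.Collections.Generic;
using System.Linq;

public static class SelectionHelper
{
    public static ICollection<TTest> SelectByFullyQualifiedName<TTest>(
        IEnumerable<TTest> allTests,
        IEnumerable<string> selectedFullyQualifiedNames,
        Func<TTest, string> fullyQualifiedNameOf)
    {
        // One pass to build the set, one pass to filter.
        var selected = new HashSet<string>(selectedFullyQualifiedNames, StringComparer.Ordinal);
        return allTests.Where(t => selected.Contains(fullyQualifiedNameOf(t))).ToArray();
    }
}
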
Ejemplo n.º 60
0
        public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            var CatchExe = tests.First().Source;
            var timer    = Stopwatch.StartNew();

            // Get a list of all test case names
            var listOfTestCases = tests.Aggregate("", (acc, test) => acc + test.DisplayName + "\n");

            // Use the directory of the executable as the working directory.
            string workingDirectory = System.IO.Path.GetDirectoryName(CatchExe);

            if (workingDirectory == "")
            {
                workingDirectory = ".";
            }

            // Write them to the input file for Catch runner
            string caseFile = "test.cases";

            System.IO.File.WriteAllText(
                workingDirectory + System.IO.Path.DirectorySeparatorChar + caseFile,
                listOfTestCases);
            string originalDirectory = Directory.GetCurrentDirectory();

            // Execute the tests
            IList<string> output_text;

            string arguments = "-r xml --durations yes --input-file=" + caseFile;

            if (runContext.IsBeingDebugged)
            {
                output_text = ProcessRunner.RunDebugProcess(frameworkHandle, CatchExe, arguments, workingDirectory);
            }
            else
            {
                output_text = ProcessRunner.RunProcess(CatchExe, arguments, workingDirectory);
            }

            timer.Stop();
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Overall time " + timer.Elapsed.ToString());

            // Output as a single string.
            string output = output_text.Aggregate("", (acc, add) => acc + add);

            System.IO.MemoryStream reader = new System.IO.MemoryStream(System.Text.Encoding.ASCII.GetBytes(output));
            var serializer  = new XmlSerializer(typeof(CatchTestAdapter.Tests.Catch));
            var catchResult = (CatchTestAdapter.Tests.Catch)serializer.Deserialize(reader);

            foreach (var testCase in catchResult.TestCases)
            {
                // Find the matching test case
                var test       = tests.Where((test_case) => test_case.DisplayName == testCase.Name).ElementAt(0);
                var testResult = new TestResult(test);
                // Add the test execution time provided by Catch to the result.
                var testTime = testCase.Result.Duration;
                testResult.Duration = TimeSpan.FromSeconds(Double.Parse(testTime, CultureInfo.InvariantCulture));
                if (testCase.Result.Success == "true")
                {
                    testResult.Outcome = TestOutcome.Passed;
                }
                else
                {
                    // Mark failure.
                    testResult.Outcome = TestOutcome.Failed;

                    // Parse the failure to a flat result.
                    List<FlatResult> failures = GetFlatFailure(testCase);
                    testResult.ErrorMessage = $"{Environment.NewLine}";
                    for (int i = 1; i <= failures.Count; ++i)
                    {
                        var failure = failures[i - 1];
                        // Populate the error message.
                        var newline = failure.SectionPath.IndexOf("\n");
                        if (newline != -1)
                        {
                            // Remove first line of the SectionPath, which is the test case name.
                            failure.SectionPath      = failure.SectionPath.Substring(failure.SectionPath.IndexOf("\n") + 1);
                            testResult.ErrorMessage += $"#{i} - {failure.SectionPath}{Environment.NewLine}{failure.Expression}{Environment.NewLine}";
                        }
                        else
                        {
                            testResult.ErrorMessage += $"#{i} - {failure.Expression}{Environment.NewLine}";
                        }
                        // And the error stack.
                        testResult.ErrorStackTrace += $"at #{i} - {test.DisplayName}() in {failure.FilePath}:line {failure.LineNumber}{Environment.NewLine}";
                    }
                }
                // Finally record the result.
                frameworkHandle.RecordResult(testResult);
            }
            // Remove the temporary input file from the working directory it was written to.
            System.IO.File.Delete(workingDirectory + System.IO.Path.DirectorySeparatorChar + caseFile);
        }
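
// A minimal sketch, independent of the Catch adapter types, of the temporary case-file
// handling the example above relies on: build the path with Path.Combine and wrap the run
// in try/finally so the file is removed even when the runner throws. The helper name and
// its callback are assumptions of this sketch.
using System;
using System.IO;

public static class CaseFileHelper
{
    public static void WithCaseFile(string workingDirectory, string contents, Action<string> runWithFile)
    {
        string caseFilePath = Path.Combine(workingDirectory, "test.cases");
        File.WriteAllText(caseFilePath, contents);
        try
        {
            runWithFile(caseFilePath);
        }
        finally
        {
            // Always clean up the temporary input file, even if the runner failed.
            File.Delete(caseFilePath);
        }
    }
}
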