Example #1
        public TestRunResult CaptureCoverage(bool cantUsePipe, bool cantUseUnloadAppDomain)
        {
            if (cantUseUnloadAppDomain)
            {
                _logger.LogWarning("Can't capture the test coverage as the target framework does not support 'AppDomain'. ");
                return new TestRunResult() { Success = true };
            }

            if (cantUsePipe)
            {
                _logger.LogDebug("Target framework does not support NamedPipes. Stryker will use environment variables instead.");
            }
            if (_flags.HasFlag(OptimizationFlags.SkipUncoveredMutants) || _flags.HasFlag(OptimizationFlags.CoverageBasedTest))
            {
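                // coverage analysis is requested: launch the test run once with the coverage collector's environment variables attached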
                var collector = new CoverageCollector();
                collector.SetLogger((message) => _logger.LogTrace(message));
                collector.Init(!cantUsePipe);
                var coverageEnvironment = collector.GetEnvironmentVariables();
                var result = LaunchTestProcess(null, coverageEnvironment);

                var data = collector.RetrieveCoverData();
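                // the first ';'-separated field is a comma-separated list of covered mutant ids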
                var coveredMutants = data.Split(";")[0].Split(",", StringSplitOptions.RemoveEmptyEntries);

                CoverageMutants.DeclareCoveredMutants(coveredMutants.Select(int.Parse));
                return result;
            }

            return new TestRunResult() { Success = true };
        }
Example #2
        public TestRunResult CaptureCoverage()
        {
            TestRunResult result;
            var           needCoverage = _flags.HasFlag(OptimizationFlags.CoverageBasedTest) || _flags.HasFlag(OptimizationFlags.SkipUncoveredMutants);

            if (needCoverage && _flags.HasFlag(OptimizationFlags.CaptureCoveragePerTest))
            {
                return CaptureCoveragePerIsolatedTests();
            }
            var runner = TakeRunner();

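            // whichever branch runs, the runner is handed back via ReturnRunner in the finally block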
            try
            {
                if (needCoverage)
                {
                    result    = runner.CaptureCoverage();
                    _coverage = runner.CoverageMutants;
                }
                else
                {
                    result = runner.RunAll(null, null);
                }
            }
            finally
            {
                ReturnRunner(runner);
            }
            return result;
        }
Example #3
        public TestRunResult CaptureCoverage(bool cantUsePipe, bool cantUseUnloadAppDomain)
        {
            var needCoverage = _flags.HasFlag(OptimizationFlags.CoverageBasedTest) || _flags.HasFlag(OptimizationFlags.SkipUncoveredMutants);

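            // per-test coverage capture is delegated to CaptureCoveragePerIsolatedTests; otherwise a single runner captures coverage for the whole run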
            if (needCoverage && _flags.HasFlag(OptimizationFlags.CaptureCoveragePerTest))
            {
                return CaptureCoveragePerIsolatedTests(cantUsePipe);
            }

            var           runner = TakeRunner();
            TestRunResult result;

            try
            {
                if (needCoverage)
                {
                    result          = runner.CaptureCoverage(cantUsePipe, cantUseUnloadAppDomain);
                    CoverageMutants = runner.CoverageMutants;
                }
                else
                {
                    result = runner.RunAll(null, null);
                }
            }
            finally
            {
                ReturnRunner(runner);
            }
            return result;
        }
Example #4
 public VsTestRunner(
     StrykerOptions options,
     OptimizationFlags flags,
     ProjectInfo projectInfo,
     ICollection<TestCase> testCasesDiscovered,
     TestCoverageInfos mappingInfos,
     IFileSystem fileSystem        = null,
     IVsTestHelper helper          = null,
     ILogger logger                = null,
     IVsTestConsoleWrapper wrapper = null,
     Func<IDictionary<string, string>, int, IStrykerTestHostLauncher> hostBuilder = null)
 {
     _logger      = logger ?? ApplicationLogging.LoggerFactory.CreateLogger<VsTestRunner>();
     _fileSystem  = fileSystem ?? new FileSystem();
     _options     = options;
     _flags       = flags;
     _projectInfo = projectInfo;
     _hostBuilder = hostBuilder ?? ((dico, id) => new StrykerVsTestHostLauncher(dico, id));
     SetListOfTests(testCasesDiscovered);
     _ownHelper      = helper == null;
     _vsTestHelper   = helper ?? new VsTestHelper();
     CoverageMutants = mappingInfos ?? new TestCoverageInfos();
     _vsTestConsole  = wrapper ?? PrepareVsTestConsole();
     _id             = _count++;
     if (testCasesDiscovered != null)
     {
         _discoveredTests = testCasesDiscovered;
         DetectTestFramework(testCasesDiscovered);
     }
     InitializeVsTestConsole();
     _coverageEnvironment = new Dictionary<string, string>
     {
         { CoverageCollector.ModeEnvironmentVariable, flags.HasFlag(OptimizationFlags.UseEnvVariable) ? CoverageCollector.EnvMode : CoverageCollector.PipeMode }
     };
 }
Example #5
        public TestRunResult CaptureCoverage(IEnumerable<Mutant> mutants, bool cantUseUnloadAppDomain, bool cantUsePipe)
        {
            if (!_flags.HasFlag(OptimizationFlags.SkipUncoveredMutants) &&
                !_flags.HasFlag(OptimizationFlags.CoverageBasedTest))
            {
                return new TestRunResult(true);
            }

            if (cantUseUnloadAppDomain)
            {
                _logger.LogWarning("Can't capture the test coverage as the target framework does not support 'AppDomain'. ");
                return new TestRunResult(true);
            }

            if (cantUsePipe)
            {
                _logger.LogWarning("Can't capture the test coverage as the target framework does not support pipes. ");
                return new TestRunResult(true);
            }

            var collector = new CoverageCollector();

            collector.SetLogger(message => _logger.LogTrace(message));
            var coverageEnvironment = new Dictionary<string, string>();

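            // tell the collector, via the 'Coverage' environment variable, to report over the named pipe exposed by _server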
            coverageEnvironment["Coverage"] = $"pipe:{_server.PipeName}";

            var result = LaunchTestProcess(null, coverageEnvironment);

            if (!WaitOnLck(_lck, () => _lastMessage != null, 5000))
            {
                // Failed to retrieve coverage data for testCase
                return null;
            }

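            // the first ';'-separated field of the received message lists the ids of the mutants covered by this run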
            var testedMutant = _lastMessage.Split(";")[0].Split(",").Select(int.Parse).ToList();

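            // mutants hit by the run are treated as covered by every test; the rest get no covering tests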
            foreach (var mutant in mutants)
            {
                mutant.CoveringTests = testedMutant.Contains(mutant.Id) ? TestListDescription.EveryTest() : new TestListDescription(null);
            }

            return result;
        }
Example #6
        public TestRunResult CaptureCoverage()
        {
            if (_flags.HasFlag(OptimizationFlags.SkipUncoveredMutants) || _flags.HasFlag(OptimizationFlags.CoverageBasedTest))
            {
                var collector = new CoverageCollector();
                collector.SetLogger((message) => _logger.LogTrace(message));
                collector.Init(true);
                var coverageEnvironment = collector.GetEnvironmentVariables();
                var result = LaunchTestProcess(null, coverageEnvironment);

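                // fetch the coverage data and register the covered mutant ids with CoverageMutants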
                var data = collector.RetrieveCoverData("full");

                CoverageMutants.DeclareCoveredMutants(data.Split(";")[0].Split(",").Select(int.Parse));
                return result;
            }
            else
            {
                return LaunchTestProcess(null, null);
            }
        }
Example #7
        public TestRunResult RunAll(int? timeoutMs, int? mutationId)
        {
            var envVars = new Dictionary<string, string>();

            if (mutationId != null)
            {
                envVars["ActiveMutation"] = mutationId.ToString();
            }

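            // with coverage-based testing, run only the tests known to cover this mutation; a null list means every test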
            var testCases = (mutationId == null || !_flags.HasFlag(OptimizationFlags.CoverageBasedTest)) ? null : CoverageMutants.GetTests<TestCase>(mutationId.Value);

            if (testCases == null)
            {
                _logger.LogDebug($"Runner {_id}: Testing {mutationId} against all tests.");
            }
            else
            {
                _logger.LogDebug($"Runner {_id}: Testing {mutationId} against:{string.Join(", ", testCases.Select(x => x.FullyQualifiedName))}.");
            }
            return RunVsTest(testCases, timeoutMs, envVars);
        }
Example #8
        public TestRunResult RunAll(int? timeoutMs, IReadOnlyMutant mutant)
        {
            var envVars = new Dictionary<string, string>();

            if (mutant != null)
            {
                envVars["ActiveMutation"] = mutant.Id.ToString();
            }

            if (_flags.HasFlag(OptimizationFlags.CoverageBasedTest) && mutant != null && (mutant.CoveringTest == null || mutant.CoveringTest.Count == 0))
            {
                return new TestRunResult { ResultMessage = "Not covered by any test", Success = true };
            }

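            // null means run the full test suite; narrowed below when coverage analysis applies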
            IEnumerable<TestCase> testCases = null;

            // if we optimize the number of tests to run
            if (mutant != null && _flags.HasFlag(OptimizationFlags.CoverageBasedTest))
            {
                // we must run all tests if the mutant needs it (static), except when coverage has been captured by isolated tests
                testCases = (mutant.MustRunAllTests && !_flags.HasFlag(OptimizationFlags.CaptureCoveragePerTest))
                   ? null : _discoveredTests.Where(t => mutant.CoveringTest.ContainsKey(t.Id.ToString())).ToList();
                if (testCases == null)
                {
                    _logger.LogDebug($"Runner {_id}: Testing {mutant} against all tests.");
                }
                else
                {
                    _logger.LogDebug($"Runner {_id}: Testing {mutant} against:{string.Join(", ", testCases.Select(x => x.FullyQualifiedName))}.");
                }
            }
            return RunVsTest(testCases, timeoutMs, envVars);
        }
Example #9
        public TestRunResult CaptureCoverage(IEnumerable<Mutant> mutants, bool cantUseAppDomain, bool cantUsePipe)
        {
            var needCoverage = _flags.HasFlag(OptimizationFlags.CoverageBasedTest) || _flags.HasFlag(OptimizationFlags.SkipUncoveredMutants);

            if (needCoverage && _flags.HasFlag(OptimizationFlags.CaptureCoveragePerTest))
            {
                return CaptureCoveragePerIsolatedTests(mutants, cantUseAppDomain, cantUsePipe);
            }

            var           runner = TakeRunner();
            TestRunResult result;

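            // the borrowed runner is returned in the finally block, whether or not the run succeeds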
            try
            {
                result = needCoverage ? runner.CaptureCoverage(mutants, cantUseAppDomain, cantUsePipe) : runner.RunAll(null, null, null);
            }
            finally
            {
                ReturnRunner(runner);
            }
            return result;
        }
Example #10
 public VsTestRunner(StrykerOptions options,
                     OptimizationFlags flags,
                     ProjectInfo projectInfo,
                     ICollection<TestCase> testCasesDiscovered,
                     TestCoverageInfos mappingInfos,
                     IFileSystem fileSystem = null,
                     VsTestHelper helper    = null)
 {
     _fileSystem  = fileSystem ?? new FileSystem();
     _options     = options;
     _flags       = flags;
     _projectInfo = projectInfo;
     SetListOfTests(testCasesDiscovered);
     _ownHelper      = helper == null;
     _vsTestHelper   = helper ?? new VsTestHelper();
     CoverageMutants = mappingInfos ?? new TestCoverageInfos();
     _vsTestConsole  = PrepareVsTestConsole();
     _id             = _count++;
     InitializeVsTestConsole();
     _coverageEnvironment = new Dictionary<string, string>
     {
         { CoverageCollector.ModeEnvironmentVariable, flags.HasFlag(OptimizationFlags.UseEnvVariable) ? CoverageCollector.EnvMode : CoverageCollector.PipeMode }
     };
 }
Example #11
        public TestRunResult TestMultipleMutants(int? timeoutMs, IReadOnlyList<Mutant> mutants, TestUpdateHandler update)
        {
            var mutantTestsMap = new Dictionary<int, IList<string>>();
            ICollection<TestCase> testCases = null;

            if (mutants != null)
            {
                // if we optimize the number of tests to run
                if (_flags.HasFlag(OptimizationFlags.CoverageBasedTest))
                {
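                    // map each mutant id to its covering tests; a null entry means the mutant must be run against every test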
                    var needAll = false;
                    foreach (var mutant in mutants)
                    {
                        List<string> tests;
                        if ((mutant.IsStaticValue && !_flags.HasFlag(OptimizationFlags.CaptureCoveragePerTest)) || mutant.MustRunAgainstAllTests)
                        {
                            tests   = null;
                            needAll = true;
                        }
                        else
                        {
                            tests = mutant.CoveringTests.GetList().Select(t => t.Guid).ToList();
                        }
                        mutantTestsMap.Add(mutant.Id, tests);
                    }

                    testCases = needAll ? null : mutants.SelectMany(m => m.CoveringTests.GetList()).Distinct().Select(t => _discoveredTests.First(tc => tc.Id.ToString() == t.Guid)).ToList();

                    _logger.LogDebug($"{RunnerId}: Testing [{string.Join(',', mutants.Select(m => m.DisplayName))}] " +
                                     $"against {(testCases == null ? "all tests." : string.Join(", ", testCases.Select(x => x.FullyQualifiedName)))}.");
                    if (testCases?.Count == 0)
                    {
                        return new TestRunResult(TestListDescription.NoTest(), TestListDescription.NoTest(), TestListDescription.NoTest(), "Mutants are not covered by any test!");
                    }
                }
                else
                {
                    if (mutants.Count > 1)
                    {
                        throw new GeneralStrykerException("Internal error: trying to test multiple mutants simultaneously without 'perTest' coverage analysis.");
                    }
                    mutantTestsMap.Add(mutants.FirstOrDefault().Id, new List<string>());
                }
            }

            var expectedTests = testCases?.Count ?? DiscoverNumberOfTests();

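            // callback invoked as results stream in: once every mutant's outcome is settled, the remaining test run is cancelled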
            void HandleUpdate(IRunResults handler)
            {
                if (mutants == null)
                {
                    return;
                }
                var handlerTestResults = handler.TestResults;
                var tests = handlerTestResults.Count == DiscoverNumberOfTests()
                    ? TestListDescription.EveryTest()
                    : new TestListDescription(handlerTestResults.Select(tr => (TestDescription)tr.TestCase));
                var failedTest = new TestListDescription(handlerTestResults.Where(tr => tr.Outcome == TestOutcome.Failed)
                                                         .Select(tr => (TestDescription)tr.TestCase));
                var testsInProgress  = new TestListDescription(handler.TestsInTimeout?.Select(t => (TestDescription)t));
                var remainingMutants = update?.Invoke(mutants, failedTest, tests, testsInProgress);

                if (handlerTestResults.Count >= expectedTests || remainingMutants != false || _aborted)
                {
                    return;
                }
                // every mutant's status has been resolved, so we can stop
                _logger.LogDebug($"{RunnerId}: Each mutant's fate has been established, we can stop.");
                _vsTestConsole.CancelTestRun();
                _aborted = true;
            }

            var testResults   = RunTestSession(testCases, GenerateRunSettings(timeoutMs, mutants != null, false, mutantTestsMap), HandleUpdate);
            var resultAsArray = testResults.TestResults.ToArray();
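            // fewer results than expected without an explicit cancellation is treated as a timed-out run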
            var timeout       = (!_aborted && resultAsArray.Length < expectedTests);
            var ranTests      = resultAsArray.Length == DiscoverNumberOfTests() ? TestListDescription.EveryTest() : new TestListDescription(resultAsArray.Select(tr => (TestDescription)tr.TestCase));
            var failedTests   = resultAsArray.Where(tr => tr.Outcome == TestOutcome.Failed).Select(tr => (TestDescription)tr.TestCase).ToImmutableArray();

            if (ranTests.Count == 0 && (testResults.TestsInTimeout == null || testResults.TestsInTimeout.Count == 0))
            {
                _logger.LogDebug($"{RunnerId}: Test session reports 0 result and 0 stuck tests.");
            }

            var message = string.Join(Environment.NewLine,
                                      resultAsArray.Where(tr => !string.IsNullOrWhiteSpace(tr.ErrorMessage))
                                      .Select(tr => tr.ErrorMessage));
            var failedTestsDescription = new TestListDescription(failedTests);
            var timedOutTests          = new TestListDescription(testResults.TestsInTimeout?.Select(t => (TestDescription)t));

            return timeout ? TestRunResult.TimedOut(ranTests, failedTestsDescription, timedOutTests, message) : new TestRunResult(ranTests, failedTestsDescription, timedOutTests, message);
        }