/// <summary>
/// Runs the test suite against the given mutants, in parallel groups, and reports each result.
/// </summary>
/// <param name="mutantsToTest">Mutants that still need a test verdict.</param>
private void TestMutants(IEnumerable<Mutant> mutantsToTest)
{
    // Partition the mutants into groups that can share a single test session.
    var mutantGroups = BuildMutantGroupsForTest(mutantsToTest.ToList());

    var parallelOptions = new ParallelOptions
    {
        MaxDegreeOfParallelism = _options.ConcurrentTestrunners
    };

    Parallel.ForEach(mutantGroups, parallelOptions, mutantGroup =>
    {
        // Mutants already forwarded to the reporter for this group.
        var alreadyReported = new HashSet<Mutant>();

        // Called back by the executor with partial results; the return value
        // tells the runner whether to keep executing the remaining tests.
        bool HandleTestUpdate(IReadOnlyList<Mutant> testedMutants,
            ITestListDescription failedTests,
            ITestListDescription ranTests,
            ITestListDescription timedOutTests)
        {
            // Unless early bail-out on kill is enabled, always run the full suite.
            var keepRunning = !_options.Optimizations.HasFlag(OptimizationFlags.AbortTestOnKill);
            foreach (var mutant in testedMutants)
            {
                mutant.AnalyzeTestRun(failedTests, ranTests, timedOutTests);
                if (mutant.ResultStatus == MutantStatus.NotRun)
                {
                    // Not all mutants in this group were tested so we continue.
                    keepRunning = true;
                }

                // Report on mutant that has been tested.
                OnMutantTested(mutant, alreadyReported);
            }

            return keepRunning;
        }

        _mutationTestExecutor.Test(mutantGroup, Input.TimeoutMs, HandleTestUpdate);

        OnMutantsTested(mutantGroup, alreadyReported);
    });
}
/// <summary>
/// Runs every pending mutant against the test suite and produces the final run result.
/// Logs a warning (instead of running) when nothing is left to test.
/// </summary>
/// <param name="options">Run options; supplies the degree of parallelism.</param>
/// <returns>The overall run result with the project's mutation score (null score when there are no mutants).</returns>
public StrykerRunResult Test(StrykerOptions options)
{
    var mutantsNotRun = _input.ProjectInfo.ProjectContents.Mutants
        .Where(x => x.ResultStatus == MutantStatus.NotRun)
        .ToList();
    if (!mutantsNotRun.Any())
    {
        if (_input.ProjectInfo.ProjectContents.Mutants.Any(x => x.ResultStatus == MutantStatus.Skipped))
        {
            _logger.LogWarning("It looks like all mutants with tests were excluded. Try a re-run with less exclusion!");
        }
        if (_input.ProjectInfo.ProjectContents.Mutants.Any(x => x.ResultStatus == MutantStatus.NoCoverage))
        {
            _logger.LogWarning("It looks like all non-excluded mutants are not covered by a test. Go add some tests!");
        }
        if (!_input.ProjectInfo.ProjectContents.Mutants.Any())
        {
            _logger.LogWarning("It\'s a mutant-free world, nothing to test.");
            return new StrykerRunResult(options, null);
        }
    }

    // BUG FIX: the original computed this filter but then iterated mutantsNotRun in the
    // Parallel.ForEach below, so Skipped/NoCoverage mutants would still have been executed.
    // Materialized once so the deferred query is not re-enumerated by Any() and ForEach.
    var mutantsToTest = mutantsNotRun
        .Where(x => x.ResultStatus != MutantStatus.Skipped && x.ResultStatus != MutantStatus.NoCoverage)
        .ToList();
    if (mutantsToTest.Any())
    {
        _reporter.OnStartMutantTestRun(mutantsNotRun, _mutationTestExecutor.TestRunner.Tests);
        Parallel.ForEach(
            mutantsToTest,
            new ParallelOptions { MaxDegreeOfParallelism = options.ConcurrentTestrunners },
            mutant =>
            {
                _mutationTestExecutor.Test(mutant, _input.TimeoutMs);
                _reporter.OnMutantTested(mutant);
            });
    }

    _reporter.OnAllMutantsTested(_input.ProjectInfo.ProjectContents);
    // The runner holds external resources (test processes); release them before reporting the score.
    _mutationTestExecutor.TestRunner.Dispose();
    return new StrykerRunResult(options, _input.ProjectInfo.ProjectContents.GetMutationScore());
}
/// <summary>
/// Runs the test suite against the given mutants in parallel groups, discarding
/// failures that already occurred during the initial (unmutated) test run.
/// </summary>
/// <param name="mutantsToTest">Mutants that still need a test verdict.</param>
private void TestMutants(IEnumerable<Mutant> mutantsToTest)
{
    var mutantGroups = BuildMutantGroupsForTest(mutantsToTest.ToList());
    var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = _options.Concurrency };

    // Tests that were already failing before any mutation was applied; their
    // failures must not be credited to a mutant.
    var initiallyFailingTests = Input.InitialTestRun.Result.FailingTests.GetGuids().ToHashSet();

    Parallel.ForEach(mutantGroups, parallelOptions, mutantGroup =>
    {
        // Mutants already forwarded to the reporter for this group.
        var alreadyReported = new HashSet<Mutant>();

        // Called back by the executor with partial results; the return value
        // tells the runner whether to keep executing the remaining tests.
        bool HandleTestUpdate(IReadOnlyList<Mutant> testedMutants,
            ITestGuids failedTests,
            ITestGuids ranTests,
            ITestGuids timedOutTests)
        {
            var keepRunning = _options.OptimizationMode.HasFlag(OptimizationModes.DisableBail);

            if (initiallyFailingTests.Count > 0
                && failedTests.GetGuids().Any(initiallyFailingTests.Contains))
            {
                // some of the failing tests where failing without any mutation
                // we discard those tests
                failedTests = new TestsGuidList(
                    failedTests.GetGuids().Where(t => !initiallyFailingTests.Contains(t)));
            }

            foreach (var mutant in testedMutants)
            {
                mutant.AnalyzeTestRun(failedTests, ranTests, timedOutTests);
                if (mutant.ResultStatus == MutantStatus.NotRun)
                {
                    // Not all mutants in this group were tested so we continue.
                    keepRunning = true;
                }

                // Report on mutant that has been tested.
                OnMutantTested(mutant, alreadyReported);
            }

            return keepRunning;
        }

        _mutationTestExecutor.Test(mutantGroup, Input.InitialTestRun.TimeoutValueCalculator, HandleTestUpdate);

        OnMutantsTested(mutantGroup, alreadyReported);
    });
}
/// <summary>
/// Runs every pending mutant against the test suite (grouped, in parallel), logging how much
/// work coverage analysis saves, and produces the final run result.
/// </summary>
/// <param name="options">Run options; supplies optimization flags and the degree of parallelism.</param>
/// <returns>The overall run result with the project's mutation score (NaN when there are no mutants).</returns>
public StrykerRunResult Test(StrykerOptions options)
{
    var viableMutantsCount = _input.ProjectInfo.ProjectContents.Mutants.Count(x => x.CountForStats);
    var mutantsNotRun = _input.ProjectInfo.ProjectContents.Mutants
        .Where(x => x.ResultStatus == MutantStatus.NotRun)
        .ToList();
    if (!mutantsNotRun.Any())
    {
        if (_input.ProjectInfo.ProjectContents.Mutants.Any(x => x.ResultStatus == MutantStatus.Ignored))
        {
            _logger.LogWarning("It looks like all mutants with tests were excluded. Try a re-run with less exclusion!");
        }
        if (_input.ProjectInfo.ProjectContents.Mutants.Any(x => x.ResultStatus == MutantStatus.NoCoverage))
        {
            _logger.LogWarning("It looks like all non-excluded mutants are not covered by a test. Go add some tests!");
        }
        if (!_input.ProjectInfo.ProjectContents.Mutants.Any())
        {
            _logger.LogWarning("It\'s a mutant-free world, nothing to test.");
            return new StrykerRunResult(options, double.NaN);
        }
    }

    var mutantsToTest = mutantsNotRun;

    // Both optimization branches below logged this exact message; factored out
    // to keep the two formats from drifting apart.
    void LogCoverageSavings(int total, int toTest)
    {
        if (total > 0 && total != toTest)
        {
            _logger.LogInformation($"Coverage analysis will reduce run time by discarding {(total - toTest) / (double)total:P1} of tests because they would not change results.");
        }
    }

    if (_options.Optimizations.HasFlag(OptimizationFlags.CoverageBasedTest))
    {
        var testCount = _mutationTestExecutor.TestRunner.DiscoverNumberOfTests();
        // A mutant that must run against all tests costs the full suite; otherwise only its covering tests.
        var toTest = mutantsNotRun.Sum(x => x.MustRunAgainstAllTests ? testCount : x.CoveringTests.Count);
        LogCoverageSavings(testCount * viableMutantsCount, toTest);
    }
    else if (_options.Optimizations.HasFlag(OptimizationFlags.SkipUncoveredMutants))
    {
        // Count property instead of the Count() extension call on the materialized list.
        LogCoverageSavings(viableMutantsCount, mutantsToTest.Count);
    }

    if (mutantsToTest.Any())
    {
        var mutantGroups = BuildMutantGroupsForTest(mutantsNotRun);
        _reporter.OnStartMutantTestRun(mutantsNotRun, _mutationTestExecutor.TestRunner.Tests);
        Parallel.ForEach(
            mutantGroups,
            new ParallelOptions { MaxDegreeOfParallelism = options.ConcurrentTestrunners },
            mutants =>
            {
                // Mutants already forwarded to the reporter for this group.
                var reportedMutants = new HashSet<Mutant>();
                _mutationTestExecutor.Test(mutants, _input.TimeoutMs,
                    (testedMutants, failedTests, ranTests, timedOutTest) =>
                    {
                        // Unless early bail-out on kill is enabled, always run the full suite.
                        var mustGoOn = !options.Optimizations.HasFlag(OptimizationFlags.AbortTestOnKill);
                        foreach (var mutant in testedMutants)
                        {
                            mutant.AnalyzeTestRun(failedTests, ranTests, timedOutTest);
                            if (mutant.ResultStatus == MutantStatus.NotRun)
                            {
                                // Not every mutant in the group has a verdict yet; keep testing.
                                mustGoOn = true;
                            }
                            else if (reportedMutants.Add(mutant))
                            {
                                // Fixed: original used Contains() followed by Add() (double lookup);
                                // Add() returns false when the mutant was already reported.
                                _reporter.OnMutantTested(mutant);
                            }
                        }
                        return mustGoOn;
                    });

                // Final sweep: warn about mutants left without a verdict and report any
                // that got a verdict but were never surfaced by the update callback.
                foreach (var mutant in mutants)
                {
                    if (mutant.ResultStatus == MutantStatus.NotRun)
                    {
                        _logger.LogWarning($"Mutation {mutant.Id} was not fully tested.");
                    }
                    else if (!reportedMutants.Contains(mutant))
                    {
                        _reporter.OnMutantTested(mutant);
                    }
                }
            });
    }

    _reporter.OnAllMutantsTested(_input.ProjectInfo.ProjectContents);
    // The runner holds external resources (test processes); release them before reporting the score.
    _mutationTestExecutor.TestRunner.Dispose();
    return new StrykerRunResult(options, _input.ProjectInfo.ProjectContents.GetMutationScore());
}