// Emits the aggregate <stats> element (source-file and source-line totals)
// into the XML report being assembled in <builder>.
private void AppendOverallStats(StringBuilder builder, CoverageData coverage)
{
    builder.AppendLine(@" <stats>");
    builder.AppendLine($@" <srcfiles value=""{this.TotalSourceFiles}"" />");
    builder.AppendLine($@" <srclines value=""{this.TotalSourceLines}"" />");
    builder.AppendLine(@" </stats>");
}
// Walks every file's per-line execution counts and accumulates the overall
// totals: files seen, executable lines, and covered lines.
private void GetOverallStats(CoverageData coverage)
{
    foreach (var entry in coverage)
    {
        TotalSourceFiles += 1;
        var counts = entry.Value.LineExecutionCounts;
        if (counts == null)
        {
            continue;
        }
        // Slot 0 is skipped: line numbers are 1-based in the counts array.
        var executable = 0;
        for (var line = 1; line < counts.Length; line++)
        {
            if (!counts[line].HasValue)
            {
                continue;
            }
            executable++;
            if (counts[line] > 0)
            {
                TotalSourceLinesCovered += 1;
            }
        }
        TotalSourceLines += executable;
    }
}
/// <summary>
/// Validates <paramref name="coverageFile"/> and splits it into per-module
/// coverage files under <paramref name="outputCoverageFilePath"/>.
/// </summary>
/// <exception cref="InvalidCoverageFileException">
/// Thrown when the file fails validation or when splitting fails.
/// </exception>
public static void SplitCovFile(string coverageFile, string outputCoverageFilePath, bool includeSkippedMods)
{
    try
    {
        if (!(CoverageData.IsValidCoverageFile(coverageFile)))
        {
            throw new InvalidCoverageFileException(coverageFile);
        }
    }
    catch (InvalidCoverageFileException)
    {
        throw;
    }
    catch (Exception ex)
    {
        throw new InvalidCoverageFileException(ex.Message, ex);
    }

    try
    {
        CoverageData.SplitCoverageFile(coverageFile, outputCoverageFilePath, includeSkippedMods, true);
    }
    catch (AnalysisException ex)
    {
        // Fix: preserve the original failure as the inner exception so the
        // root cause and its stack trace are not lost (the (message, inner)
        // constructor is already used above).
        throw new InvalidCoverageFileException(ex.Message, ex);
    }
}
// Wires the walker up with its mutation source, the callback fired per
// generated mutant, and the coverage lookup tables consulted while walking.
public MutatingWalker(MutantEnumerator mutator, OnMutant callback, CoverageData coverage,
    Dictionary<int, List<String>> testCaseCoverageByLineID)
{
    _coverage = coverage;
    _testCaseCoverageByLineID = testCaseCoverageByLineID;
    _mutator = mutator;
    _onMutant = callback;
}
// Runs the instrumented assembly's tests inside an isolated AppDomain to
// collect per-test line coverage, then inverts the result into
// testCaseCoverageByLineID (line ID -> names of tests covering that line).
private void CollectCoverage(byte[] instrumentedAssembly)
{
    AppDomain domain = AppDomain.CreateDomain("Instrumentation", null, new AppDomainSetup()
    {
        ConfigurationFile = appConfigPath,
        ApplicationBase = outputPath
    });
    try
    {
        AssemblyLoader handler = (AssemblyLoader)domain.CreateInstanceFromAndUnwrap(
            typeof(AssemblyLoader).Assembly.Location, typeof(AssemblyLoader).FullName);
        handler.Setup(dependencies, instrumentedAssembly, testAssembly);
        MSTestRunner test = (MSTestRunner)domain.CreateInstanceFromAndUnwrap(
            typeof(MSTestRunner).Assembly.Location, typeof(MSTestRunner).FullName);
        test.SetupCoverage(CoverageData.GetInstance().LineLocatorIDs, CoverageData.GetInstance().reverseLineLocatorIDs);
        Dictionary<string, Dictionary<int, long>> lineCountsByTests = test.RunTestsForCoverage(testAssembly);
        // Invert test -> (lineID -> count) into lineID -> list of covering tests.
        testCaseCoverageByLineID = new Dictionary<int, List<string>>();
        foreach (var testEntry in lineCountsByTests)
        {
            foreach (var lineEntry in testEntry.Value)
            {
                int lineID = lineEntry.Key;
                if (!testCaseCoverageByLineID.ContainsKey(lineID))
                {
                    testCaseCoverageByLineID[lineID] = new List<string>();
                }
                testCaseCoverageByLineID[lineID].Add(testEntry.Key);
            }
        }
    }
    finally
    {
        // Fix: unload inside finally so the AppDomain (and the assemblies it
        // loaded) cannot leak when test execution throws.
        AppDomain.Unload(domain);
    }
}
// Builds one granule per row of the coverage matrix.  A granule's membership
// vector has a 1 for every row that covers (has a 1 in) all the columns the
// source row covers, and 0 otherwise; granule numbering is 1-based.
public List<Granule> GenerateGranules(CoverageData coverageData)
{
    var granules = new List<Granule>();
    for (var row = 0; row < coverageData.Count; row++)
    {
        // Columns where this row is covered.
        var coveredColumns = new List<int>();
        for (var col = 0; col < coverageData[row].Count; col++)
        {
            if (coverageData[row][col] == 1)
            {
                coveredColumns.Add(col);
            }
        }
        // Mark every row that dominates this row's covered columns.
        var membership = coverageData
            .Select(candidate => coveredColumns.All(c => candidate[c] == 1) ? 1 : 0)
            .ToList();
        granules.Add(new Granule(membership, row + 1));
    }
    return granules;
}
// Writes both the HTML and the JSON coverage reports into the process's
// current working directory.
public void WriteCoverageFiles(CoverageData coverage)
{
    var outputDirectory = Environment.CurrentDirectory;
    CoverageOutputGenerator.WriteHtmlFile(outputDirectory, coverage);
    CoverageOutputGenerator.WriteJsonFile(outputDirectory, coverage);
}
// When the coverage-file validator rejects the parsed file, Load() must
// return null and surface the validator's error message via the out param.
public void Load_WhenCoverageFileValidatorReturnFalse_ThenShouldReturnNullWithErrorFromValidator()
{
    //Arrange
    string error;
    var path = "path";
    var content = new List <string> { "1;1;1", "1;0;1", "0;0;1" };
    var coverageData = new CoverageData(new List <List <int> > { new List <int> { 1, 1, 1 }, new List <int> { 1, 0, 1 }, new List <int> { 0, 0, 1 } });
    // File selection, reading, and conversion all succeed...
    _fileServiceMock.Setup(x => x.GetPathFromOpenFileDialog(It.IsAny <string>())).Returns(path);
    _fileServiceMock.Setup(x => x.ReadFile(path, out error)).Returns(content);
    _coverageDataConverterMock.Setup(x => x.Convert(content, out error)).Returns(coverageData);
    // ...but validation fails, writing an error through the out parameter
    // via the callback helper.
    _coverageFileValidatorMock.Setup(x => x.Valid(It.IsAny <CoverageFile>(), out error))
        .Callback(CallbackOutErrorHelper.DelegateForObject1).Returns(false);
    //Act
    var result = _actionsService.Load(out error);
    //Assert
    Assert.Null(result);
    Assert.Equal(CallbackOutErrorHelper.ErrorMessage, error);
}
// The converter must tolerate a trailing separator at the end of a line
// ("1;2;3;") and still produce the same parsed matrix with no error.
public void ConvertContentToData_WhenPassContentWithSeparatorOnEndOfLine_ThenShouldConvertToDataWithoutError()
{
    // Arrange
    var content = new List <string> { "1;2;3;", "1;2;3", "1;2;3" };
    // Act
    var result = _coverageDataConverter.Convert(content, out var error);
    // Assert
    var expected = new CoverageData(new List <List <int> > { new List <int> { 1, 2, 3 }, new List <int> { 1, 2, 3 }, new List <int> { 1, 2, 3 } });
    Assert.Equal(expected, result);
    Assert.Null(error);
}
// Convenience pipeline: coverage matrix -> granule list -> granule set.
public GranuleSet GenerateGran(CoverageData coverageData)
{
    return BuildGranuleSet(GenerateGranules(coverageData));
}
// Happy path: when file selection, reading, conversion, validation, and
// granule generation all succeed, Load() returns a populated result
// (granule set + path) and leaves the error null.
public void Load_WhenEverythingIsFine_ThenShouldReturnObjectWithoutError()
{
    //Arrange
    string error;
    var path = "path";
    var content = new List <string> { "1;1;1", "1;0;1", "0;0;1" };
    var coverageData = new CoverageData(new List <List <int> > { new List <int> { 1, 1, 1 }, new List <int> { 1, 0, 1 }, new List <int> { 0, 0, 1 } });
    _fileServiceMock.Setup(x => x.GetPathFromOpenFileDialog(It.IsAny <string>())).Returns(path);
    _fileServiceMock.Setup(x => x.ReadFile(path, out error)).Returns(content);
    _coverageDataConverterMock.Setup(x => x.Convert(content, out error)).Returns(coverageData);
    _coverageFileValidatorMock.Setup(x => x.Valid(It.IsAny <CoverageFile>(), out error)).Returns(true);
    _granuleServiceMock.Setup(x => x.GenerateGran(coverageData)).Returns(new GranuleSet());
    //Act
    var result = _actionsService.Load(out error);
    //Assert
    Assert.NotNull(result);
    Assert.NotNull(result.GranuleSet);
    Assert.Equal(path, result.Path);
    Assert.Null(error);
}
//---------------------------------------------------------------------
// Serializes a minimal CoverageData protobuf message (name, module count,
// exit code) to the output stream.  Values come from statics populated
// earlier in the run — presumably by the run/exit bookkeeping; confirm
// against the callers.
static void WriteCoverageDataOnly(CodedOutputStream outputStream)
{
    var coverageData = CoverageData.CreateBuilder();
    coverageData.SetName(coverageName);
    coverageData.SetModuleCount(moduleCount);
    coverageData.SetExitCode(exitCode);
    WriteMessage(outputStream, coverageData.Build());
}
// Renders the coverage report as HTML into the file at <path> (creating or
// truncating it) and returns the same path for chaining.
public static string WriteHtmlFile(string path, CoverageData coverage)
{
    using (var outputStream = new FileStream(path, FileMode.Create))
    {
        GenerateHtml(coverage, outputStream);
        return path;
    }
}
// Streams an HTML coverage report: a header, one annotated listing per file
// (each source line wrapped in a div whose class marks hit/miss/no-data),
// and a final "Total" summary row.
public static void GenerateHtml(CoverageData coverage, Stream stream)
{
    using (var writer = new StreamWriter(stream))
    {
        writer.WriteLine(HtmlFragments.BodyContentStartFormat, HtmlFragments.Js, HtmlFragments.Css);
        var totalLines = 0;
        var totalLinesCovered = 0;
        var fileNumber = 0;
        foreach (var pair in coverage)
        {
            fileNumber++;
            var fileName = pair.Key;
            var fileData = pair.Value;
            var totalSmts = 0;
            var linesCovered = 0;
            // markup is 1-based: slot i holds the rendered div for source line i;
            // slot 0 is unused.
            var markup = new string[fileData.SourceLines.Length + 1];
            for (var i = 0; i < fileData.SourceLines.Length; i++)
            {
                // "{{executed}}" is a placeholder class name filled in below
                // once the execution count for the line is known.
                markup[i + 1] = "<div class='{{executed}}'><span class=''>" + (i + 1) + "</span>" + HttpUtility.HtmlEncode(fileData.SourceLines[i]) + "</div>";
            }
            // NOTE(review): assumes LineExecutionCounts.Length <= SourceLines.Length + 1;
            // a longer counts array would index past markup — confirm upstream invariant.
            for (var i = 1; i < fileData.LineExecutionCounts.Length; i++)
            {
                var lineExecution = fileData.LineExecutionCounts[i];
                if (lineExecution.HasValue)
                {
                    totalSmts++;
                    if (lineExecution > 0)
                    {
                        linesCovered += 1;
                        markup[i] = markup[i].Replace("{{executed}}", "hit");
                    }
                    else
                    {
                        markup[i] = markup[i].Replace("{{executed}}", "miss");
                    }
                }
                else
                {
                    // No recorded count: line is not executable (blank/comment).
                    markup[i] = markup[i].Replace("{{executed}}", "");
                }
            }
            totalLinesCovered += linesCovered;
            totalLines += totalSmts;
            AppendResultLine(linesCovered, totalSmts, fileName, fileNumber, markup, writer);
        }
        // Summary row uses an id one past the last file and no per-line markup.
        AppendResultLine(totalLinesCovered, totalLines, "Total", 1+fileNumber, new string[0], writer);
        writer.WriteLine(HtmlFragments.BodyContentEnd);
    }
}
// Writes the HTML coverage report into the file at <path>, truncating any
// previous content; the caller gets the path back unchanged.
public static string WriteHtmlFile(string path, CoverageData coverage)
{
    var output = new FileStream(path, FileMode.Create);
    using (output)
    {
        GenerateHtml(coverage, output);
    }
    return path;
}
// Parametrized test: GenerateGranules must produce exactly the expected
// granule list for each supplied coverage matrix.
public void GenerateGran_WhenSendCoverageData_ThenShouldCreateGranuleSet(CoverageData coverageData, List <Granule> expected)
{
    //Arrange
    //Act
    var result = _granuleService.GenerateGranules(coverageData);
    //Assert
    Assert.Equal(expected, result);
}
// Serializes the coverage data to JSON under the conventional file name in
// <directory> and returns the full output path.
public static string WriteJsonFile(string directory, CoverageData coverage)
{
    var outputPath = Path.Combine(directory, Constants.CoverageJsonFileName);
    File.WriteAllText(outputPath, new JsonSerializer().Serialize(coverage));
    return outputPath;
}
/// <summary>
/// Builds the console coverage summary: an average-coverage line followed
/// by one percentage line per covered file.
/// </summary>
protected virtual string GetCodeCoverageMessage(CoverageData coverageData)
{
    // Fix: accumulate with StringBuilder; the original used repeated string
    // concatenation in a loop, which is O(n^2) over the number of files.
    var message = new System.Text.StringBuilder();
    message.Append("Code Coverage Results");
    message.AppendFormat(" Average Coverage: {0:0%}\n", coverageData.CoveragePercentage);
    foreach (var fileData in coverageData)
    {
        message.AppendFormat(" {0:0%} for {1}\n", fileData.Value.CoveragePercentage, fileData.Key);
    }
    return message.ToString();
}
// Writes the HTML coverage report under the conventional file name in
// <directory> and returns the full path written.
public static string WriteHtmlFile(string directory, CoverageData coverage)
{
    var htmlPath = Path.Combine(directory, Constants.CoverageHtmlFileName);
    using (var output = new FileStream(htmlPath, FileMode.Create))
    {
        GenerateHtml(coverage, output);
    }
    return htmlPath;
}
// Produces the HTML coverage report at the standard location inside
// <directory>; hands the resulting path back to the caller.
public static string WriteHtmlFile(string directory, CoverageData coverage)
{
    var reportPath = Path.Combine(directory, Constants.CoverageHtmlFileName);
    var reportStream = new FileStream(reportPath, FileMode.Create);
    using (reportStream)
    {
        GenerateHtml(coverage, reportStream);
    }
    return reportPath;
}
// GenerateGran must return a (possibly empty) GranuleSet — never null —
// even for an empty coverage matrix.
public void GenerateGran_WhenPutCoverageData_ThenShouldReturnGranuleSet()
{
    //Arrange
    var coverageData = new CoverageData(new List <List <int> >());
    //Act
    var result = _granuleService.GenerateGran(coverageData);
    //Assert
    Assert.NotNull(result);
}
// Serializes <coverage> to JSON and writes it to the file at <path>
// (creating or truncating it), then returns the path for chaining.
public static string WriteJsonFile(string path, CoverageData coverage)
{
    var serializer = new JsonSerializer();
    using (var writer = new StreamWriter(new FileStream(path, FileMode.Create)))
    {
        writer.Write(serializer.Serialize(coverage));
    }
    return path;
}
// Per-file percentage = covered lines / lines with a recorded count
// (index 0 and null entries excluded); the aggregate percentage spans
// all files.  file1: 2 of 4 counted lines covered; file2: 4 of 4.
public void Will_calculate_coverage_percentage()
{
    var coverageData = new CoverageData();
    coverageData["file1"] = new CoverageFileData();
    coverageData["file2"] = new CoverageFileData();
    coverageData["file1"].LineExecutionCounts = new int?[]{/* index 0 ignored */ 0,0,1, null,0,1};
    coverageData["file2"].LineExecutionCounts = new int?[]{/* index 0 ignored */ 0,5,1,2,1, null};
    Assert.Equal(0.5, coverageData["file1"].CoveragePercentage);
    Assert.Equal(1, coverageData["file2"].CoveragePercentage);
    Assert.Equal(0.75, coverageData.CoveragePercentage);
}
/// <summary>
/// Formats the coverage results for display: overall average first, then
/// the per-file coverage percentages, one per line.
/// </summary>
protected virtual string GetCodeCoverageMessage(CoverageData coverageData)
{
    // Fix: replace repeated string concatenation (quadratic in file count)
    // with a StringBuilder; output text is byte-identical.
    var builder = new System.Text.StringBuilder();
    builder.Append("Code Coverage Results");
    builder.AppendFormat(" Average Coverage: {0:0%}\n", coverageData.CoveragePercentage);
    foreach (var fileData in coverageData)
    {
        builder.AppendFormat(" {0:0%} for {1}\n", fileData.Value.CoveragePercentage, fileData.Key);
    }
    return builder.ToString();
}
// Writes the JSON-serialized coverage data to <path>, overwriting any
// previous file, and returns the path.
public static string WriteJsonFile(string path, CoverageData coverage)
{
    var json = new JsonSerializer().Serialize(coverage);
    using (var fileStream = new FileStream(path, FileMode.Create))
    using (var writer = new StreamWriter(fileStream))
    {
        writer.Write(json);
    }
    return path;
}
// Coverage percentage is computed over lines with a recorded count only
// (slot 0 and nulls ignored): file1 covers 2 of 4, file2 covers 4 of 4,
// giving 0.75 overall.
public void Will_calculate_coverage_percentage()
{
    var coverageData = new CoverageData();
    coverageData["file1"] = new CoverageFileData();
    coverageData["file2"] = new CoverageFileData();
    coverageData["file1"].LineExecutionCounts = new int?[] { /* index 0 ignored */ 0, 0, 1, null, 0, 1 };
    coverageData["file2"].LineExecutionCounts = new int?[] { /* index 0 ignored */ 0, 5, 1, 2, 1, null };
    Assert.Equal(0.5, coverageData["file1"].CoveragePercentage);
    Assert.Equal(1, coverageData["file2"].CoveragePercentage);
    Assert.Equal(0.75, coverageData.CoveragePercentage);
}
// A coverage file wrapping an empty matrix must fail validation with a
// non-empty error message.
public void Valid_WhenPutEmptyDataInCoverageFile_ThenShouldReturnFalseWithError()
{
    //Arrange
    var path = "path";
    var coverageData = new CoverageData(new List <List <int> >());
    var coverageFile = new CoverageFile(path, coverageData);
    //Act
    var result = _coverageFileValidator.Valid(coverageFile, out var errorMessage);
    //Assert
    Assert.False(result);
    Assert.NotEmpty(errorMessage);
}
// Folds one file's coverage into the running aggregate, lazily creating
// the aggregate on first use.  A null input is a no-op.
internal void AppendCoverageData(CoverageData fileCoverageObject)
{
    if (fileCoverageObject == null)
    {
        return;
    }
    if (CoverageObject == null)
    {
        CoverageObject = new CoverageData();
    }
    CoverageObject.Merge(fileCoverageObject);
}
// Merging populated coverage data into a freshly created CoverageData must
// carry the per-file data across, yielding the same percentages as the source.
public void Will_merge_coverage_object_with_an_empty_one()
{
    var coverageData = new CoverageData();
    coverageData["file1"] = new CoverageFileData();
    coverageData["file2"] = new CoverageFileData();
    coverageData["file1"].LineExecutionCounts = new int?[] {/* index 0 ignored */ 0, 0, 1, null, 0, 1 };
    coverageData["file2"].LineExecutionCounts = new int?[] {/* index 0 ignored */ 0, 5, 1, 2, 1, null };
    var newCoverageData = new CoverageData();
    newCoverageData.Merge(coverageData);
    Assert.Equal(0.5, newCoverageData["file1"].CoveragePercentage);
    Assert.Equal(1, newCoverageData["file2"].CoveragePercentage);
    Assert.Equal(0.75, newCoverageData.CoveragePercentage);
}
// Appends a per-file coverage section for every file that actually has
// line execution data; files without recorded counts are skipped.
private void AppendCoverageBySourceFile(StringBuilder builder, CoverageData coverage)
{
    foreach (var entry in coverage)
    {
        var data = entry.Value;
        if (data.LineExecutionCounts == null)
        {
            continue;
        }
        AppendCoverageForOneSourceFile(builder, entry.Key, data);
    }
}
// Deserializes Blanket's JSON coverage payload into a CoverageData object.
// Each entry key (a URI or path to an executed file) is normalized to a
// local path, mapped back from generated to original file path where such
// a mapping exists, filtered by the include/exclude patterns, and stored
// with the file's source lines attached.
public CoverageData DeserializeCoverageObject(string json, TestContext testContext)
{
    var data = jsonSerializer.Deserialize <BlanketCoverageObject>(json);
    // generated path -> original source path, for files the harness rewrote.
    IDictionary <string, string> generatedToOriginalFilePath = testContext.ReferencedFiles.Where(rf => rf.GeneratedFilePath != null).ToDictionary(rf => rf.GeneratedFilePath, rf => rf.Path);
    var coverageData = new CoverageData(testContext.TestFileSettings.CodeCoverageSuccessPercentage);

    // Rewrite all keys in the coverage object dictionary in order to change URIs
    // to paths and generated paths to original paths, then only keep the ones
    // that match the include/exclude patterns.
    foreach (var entry in data)
    {
        Uri uri = new Uri(entry.Key, UriKind.RelativeOrAbsolute);
        if (!uri.IsAbsoluteUri)
        {
            // Resolve against the test file path.
            string basePath = Path.GetDirectoryName(testContext.TestHarnessPath);
            uri = new Uri(Path.Combine(basePath, entry.Key));
        }
        string filePath = uri.LocalPath;

        // Fix local paths of the form: file:///c:/zzz should become c:/zzz not /c:/zzz
        // but keep network paths of the form: file://network/files/zzz as //network/files/zzz
        filePath = RegexPatterns.InvalidPrefixedLocalFilePath.Replace(filePath, "$1");
        var fileUri = new Uri(filePath, UriKind.RelativeOrAbsolute);
        filePath = fileUri.LocalPath;

        string newKey;
        if (!generatedToOriginalFilePath.TryGetValue(filePath, out newKey))
        {
            // No generated->original mapping: the file itself is the key.
            newKey = filePath;
        }

        if (IsFileEligibleForInstrumentation(newKey))
        {
            // Source lines are read from the path that actually executed
            // (filePath), while the entry is keyed by the original path.
            string[] sourceLines = fileSystem.GetLines(filePath);
            coverageData.Add(newKey, new CoverageFileData
            {
                LineExecutionCounts = entry.Value,
                FilePath = newKey,
                SourceLines = sourceLines
            });
        }
    }
    return(coverageData);
}
// Merging overlapping data: file1's counts are combined (after the merge,
// 3 of its 5 counted lines are covered -> 0.6), file2 is added outright,
// and the aggregate percentage reflects the merged totals.
public void Will_merge_coverage_object_with_an_existing_one()
{
    var coverageData1 = new CoverageData();
    coverageData1["file1"] = new CoverageFileData();
    coverageData1["file1"].LineExecutionCounts = new int?[] {/* index 0 ignored */ 0, 0, 2, 1, 0, 0 };
    var coverageData2 = new CoverageData();
    coverageData2["file1"] = new CoverageFileData();
    coverageData2["file2"] = new CoverageFileData();
    coverageData2["file1"].LineExecutionCounts = new int?[] {/* index 0 ignored */ 0, 0, 1, null, 0, 1 };
    coverageData2["file2"].LineExecutionCounts = new int?[] {/* index 0 ignored */ 0, 5, 1, 2, 1, null };
    coverageData1.Merge(coverageData2);
    Assert.Equal(0.6, coverageData1["file1"].CoveragePercentage);
    Assert.Equal(1, coverageData1["file2"].CoveragePercentage);
    Assert.Equal(0.778, Math.Round(coverageData1.CoveragePercentage,3));
}
/// <summary>
/// Reload the LIR when the selected index is changed.
/// </summary>
/// <param name="sender">Combo box.</param>
/// <param name="e">Event arguments.</param>
private void OnIRMethodsSelectedIndexChanged(object sender, EventArgs e)
{
    _txtIR.Text = "";

    // Get the selected item; bail out when nothing is selected.
    CoverageItem methodItem = _cmbIRMethods.SelectedItem as CoverageItem;
    if (methodItem == null)
    {
        return;
    }

    // Get the IR instructions from the coverage XML: match the method by
    // name and by its qualified declaring class (parent = class,
    // grandparent = namespace), case-insensitively.
    string fullTypeName = methodItem.Parent.Parent.Name + '.' + methodItem.Parent.Name;
    XElement[] instructions = CoverageData
        .Descendants("method")
        .Where(m => string.Compare(m.Attribute("name").Value, methodItem.Name, StringComparison.OrdinalIgnoreCase) == 0 &&
            string.Compare(m.Attribute("class").Value, fullTypeName, StringComparison.OrdinalIgnoreCase) == 0)
        .Descendants("instruction")
        .ToArray();

    // Build up the coverage display, one instruction per line.
    StringBuilder source = new StringBuilder();
    foreach (XElement instruction in instructions)
    {
        source.AppendLine(instruction.Value);
    }
    _txtIR.Text = source.ToString();

    // Highlight the coverage: the item's children map basic-block names to
    // covered flags; color each line whose "block" attribute is known.
    Dictionary <string, bool> coverage = methodItem.Children.ToDictionary(i => i.Name, i => i.Covered);
    for (int i = 0; i < instructions.Length; i++)
    {
        bool covered;
        string block = instructions[i].Attribute("block").Value;
        if (coverage.TryGetValue(block, out covered))
        {
            HighlightLine(_txtIR, i, 0, -1, GetHighlightColor(covered));
        }
    }
}
// Merging into an empty CoverageData must adopt the source's per-file data
// unchanged: the merged object reports the same percentages as the source.
public void Will_merge_coverage_object_with_an_empty_one()
{
    var coverageData = new CoverageData();
    coverageData["file1"] = new CoverageFileData();
    coverageData["file2"] = new CoverageFileData();
    coverageData["file1"].LineExecutionCounts = new int?[] { /* index 0 ignored */ 0, 0, 1, null, 0, 1 };
    coverageData["file2"].LineExecutionCounts = new int?[] { /* index 0 ignored */ 0, 5, 1, 2, 1, null };
    var newCoverageData = new CoverageData();
    newCoverageData.Merge(coverageData);
    Assert.Equal(0.5, newCoverageData["file1"].CoveragePercentage);
    Assert.Equal(1, newCoverageData["file2"].CoveragePercentage);
    Assert.Equal(0.75, newCoverageData.CoveragePercentage);
}
// Merging into existing data: file1's line counts combine (0.6 covered
// afterwards), file2 is added as-is, and the aggregate rounds to 0.778.
public void Will_merge_coverage_object_with_an_existing_one()
{
    var coverageData1 = new CoverageData();
    coverageData1["file1"] = new CoverageFileData();
    coverageData1["file1"].LineExecutionCounts = new int?[] { /* index 0 ignored */ 0, 0, 2, 1, 0, 0 };
    var coverageData2 = new CoverageData();
    coverageData2["file1"] = new CoverageFileData();
    coverageData2["file2"] = new CoverageFileData();
    coverageData2["file1"].LineExecutionCounts = new int?[] { /* index 0 ignored */ 0, 0, 1, null, 0, 1 };
    coverageData2["file2"].LineExecutionCounts = new int?[] { /* index 0 ignored */ 0, 5, 1, 2, 1, null };
    coverageData1.Merge(coverageData2);
    Assert.Equal(0.6, coverageData1["file1"].CoveragePercentage);
    Assert.Equal(1, coverageData1["file2"].CoveragePercentage);
    Assert.Equal(0.778, Math.Round(coverageData1.CoveragePercentage, 3));
}
// A coverage file constructed with a null path must fail validation with a
// non-empty error message, even when its data matrix is well-formed.
public void Valid_WhenPutNullLikePathInCoverageFile_ThenShouldReturnFalseWithError()
{
    //Arrange
    var coverageData = new CoverageData(new List <List <int> > { new List <int> { 1, 2, 3 }, new List <int> { 1, 2, 3 }, new List <int> { 1, 2, 3 } });
    var coverageFile = new CoverageFile(null, coverageData);
    //Act
    var result = _coverageFileValidator.Valid(coverageFile, out var errorMessage);
    //Assert
    Assert.False(result);
    Assert.NotEmpty(errorMessage);
}
// A coverage file with a non-null path and non-empty matrix must pass
// validation and leave the error message null.
public void Valid_WhenPutRightCoverageFile_ThenShouldReturnTrueWithoutError()
{
    //Arrange
    var path = "path";
    var coverageData = new CoverageData(new List <List <int> > { new List <int> { 1, 2, 3 }, new List <int> { 1, 2, 3 }, new List <int> { 1, 2, 3 } });
    var coverageFile = new CoverageFile(path, coverageData);
    //Act
    var result = _coverageFileValidator.Valid(coverageFile, out var errorMessage);
    //Assert
    Assert.True(result);
    Assert.Null(errorMessage);
}
// Dumps the formatted coverage summary to standard output.
private void PrintCodeCoverageResults(CoverageData coverage)
{
    var summary = GetCodeCoverageMessage(coverage);
    Console.WriteLine(summary);
}
// Writes only the HTML coverage report, at its conventional file name,
// into the current working directory.
public void WriteCoverageFiles(CoverageData coverage)
{
    var htmlPath = Path.Combine(Environment.CurrentDirectory, Constants.CoverageHtmlFileName);
    CoverageOutputGenerator.WriteHtmlFile(htmlPath, coverage);
}
// Accumulates the overall totals from each file's per-line execution counts:
// number of source files, executable lines, and covered lines.
private void GetOverallStats(CoverageData coverage)
{
    foreach (var pair in coverage)
    {
        this.TotalSourceFiles += 1;
        var counts = pair.Value.LineExecutionCounts;
        if (counts == null)
        {
            continue;
        }
        var statementsInFile = 0;
        // Line numbers are 1-based; slot 0 is never counted.
        for (var lineNumber = 1; lineNumber < counts.Length; lineNumber++)
        {
            var executions = counts[lineNumber];
            if (executions.HasValue)
            {
                statementsInFile++;
                if (executions > 0)
                {
                    this.TotalSourceLinesCovered += 1;
                }
            }
        }
        this.TotalSourceLines += statementsInFile;
    }
}
// Simple value pair binding a parsed coverage matrix to the file path it
// was loaded from.
public CoverageFile(string path, CoverageData coverageData)
{
    Path = path;
    CoverageData = coverageData;
}
// Writes the JSON coverage report at its standard file name inside
// <directory>; returns the full path that was written.
public static string WriteJsonFile(string directory, CoverageData coverage)
{
    var serializer = new JsonSerializer();
    var jsonPath = Path.Combine(directory, Constants.CoverageJsonFileName);
    var json = serializer.Serialize(coverage);
    File.WriteAllText(jsonPath, json);
    return jsonPath;
}
// Tokenizes one corpus document with the HebMorph lemmatizer and records
// per-word statistics in the radix tree: unrecognized Hebrew words are
// always counted (KnownToHSpell = false); recognized Hebrew words are
// counted only when ComputeCoverage is enabled (KnownToHSpell = true).
// Non-Hebrew tokens and trivially short words are skipped.
private void GotDocument(CorpusDocument doc)
{
    if (doc == null) return;

    var word = string.Empty;
    var tokens = new List<HebMorph.Token>();

    // Strip all HTML tags
    var strippedContent = Regex.Replace(doc.Content, @"</?[A-Z][A-Z0-9]*\b[^>]*>", " ", RegexOptions.Compiled | RegexOptions.Multiline | RegexOptions.IgnoreCase);
    // Remove wikipedia language referral tags
    strippedContent = Regex.Replace(strippedContent, @"\[\[([A-Z-]+?):(.+?):(.+?)\]\]", " ", RegexOptions.Compiled | RegexOptions.Multiline | RegexOptions.IgnoreCase);

    lemmatizer.SetStream(new System.IO.StringReader(strippedContent));

    // The HebMorph lemmatizer will always return a token, unless an unrecognized Hebrew
    // word was hit, then an empty tokens array will be returned.
    while (lemmatizer.LemmatizeNextToken(out word, tokens) != 0)
    {
        // Invalid token (empty or single character) — ignore.
        if (string.IsNullOrEmpty(word) || word.Length <= 1) continue;

        // Unrecognized Hebrew word: bump its count, or insert it with the
        // document it was first seen in.
        if (tokens.Count == 0)
        {
            var o = radix.Lookup(word);
            if (o != null)
            {
                o.Count++;
            }
            else
            {
                o = new CoverageData {Count = 1, FirstKnownLocation = doc.Id, KnownToHSpell = false};
                radix.AddNode(word, o);
            }
            continue;
        }

        // Otherwise, the token is either in the dictionary already, or is not a Hebrew word. If we
        // are performing complete coverage computation, add it to the radix as well

        // If we are performing a coverage calculation
        if (ComputeCoverage)
        {
            // A non-Hebrew word (single token that is not a HebrewToken).
            if (tokens.Count == 1 && !(tokens[0] is HebMorph.HebrewToken)) continue;

            // Hebrew words with one lemma or more - store the word in the radix with a flag
            // signaling it was indeed found
            var o = radix.Lookup(word);
            if (o != null)
            {
                o.Count++;
            }
            else
            {
                o = new CoverageData {Count = 1, FirstKnownLocation = doc.Id, KnownToHSpell = true};
                radix.AddNode(word, o);
            }
        }
    }
}
// Streams an HTML coverage report: header, one annotated listing per file
// (each source line in a div classed hit/miss/blank), and a final "Total"
// row.  Line numbers are right-padded with &nbsp; so they align; the
// success percentage (falling back to the default) is passed through to
// the per-file result rows.
public static void GenerateHtml(CoverageData coverage, Stream stream)
{
    var successPercentage = coverage.SuccessPercentage.HasValue
        ? coverage.SuccessPercentage.Value
        : Constants.DefaultCodeCoverageSuccessPercentage;
    using (var writer = new StreamWriter(stream))
    {
        writer.WriteLine(HtmlFragments.BodyContentStartFormat, HtmlFragments.Js, HtmlFragments.Css);
        var totalLines = 0;
        var totalLinesCovered = 0;
        var fileNumber = 0;
        foreach (var pair in coverage)
        {
            fileNumber++;
            var fileName = pair.Key;
            var fileData = pair.Value;
            var totalSmts = 0;
            var linesCovered = 0;
            // markup is 1-based: slot i holds the rendered div for line i.
            var markup = new string[fileData.SourceLines.Length + 1];
            var maxLineNumberLength = fileData.SourceLines.Length.ToString(CultureInfo.InvariantCulture).Length;
            for (var i = 0; i < fileData.SourceLines.Length; i++)
            {
                // Pad line numbers to a fixed width, using &nbsp; so the
                // padding survives HTML whitespace collapsing.
                var lineNumber = (i + 1).ToString(CultureInfo.InvariantCulture).PadLeft(maxLineNumberLength);
                lineNumber = lineNumber.Replace(" ", "&nbsp;");
                var line = HttpUtility.HtmlEncode(fileData.SourceLines[i]).Replace(" ", "&nbsp;");
                // "{{executed}}" is a placeholder class filled in below once
                // the execution count for the line is known.
                markup[i + 1] = "<div class='{{executed}}'><span class=''>" + lineNumber + "</span>" + line + "</div>";
            }
            // NOTE(review): assumes LineExecutionCounts.Length <= SourceLines.Length + 1;
            // a longer counts array would index past markup — confirm upstream invariant.
            for (var i = 1; i < fileData.LineExecutionCounts.Length; i++)
            {
                var lineExecution = fileData.LineExecutionCounts[i];
                if (lineExecution.HasValue)
                {
                    totalSmts++;
                    if (lineExecution > 0)
                    {
                        linesCovered += 1;
                        markup[i] = markup[i].Replace("{{executed}}", "hit");
                    }
                    else
                    {
                        markup[i] = markup[i].Replace("{{executed}}", "miss");
                    }
                }
                else
                {
                    // No recorded count: line is not executable.
                    markup[i] = markup[i].Replace("{{executed}}", "");
                }
            }
            totalLinesCovered += linesCovered;
            totalLines += totalSmts;
            AppendResultLine(successPercentage, linesCovered, totalSmts, fileName, fileNumber, markup, writer);
        }
        // Summary row uses an id one past the last file and no per-line markup.
        AppendResultLine(successPercentage, totalLinesCovered, totalLines, "Total", 1 + fileNumber, new string[0], writer);
        writer.WriteLine(HtmlFragments.BodyContentEnd);
    }
}
// Creates a summary for the given test file with an empty coverage
// aggregate ready to receive merged per-file data.
public TestFileSummary(string path)
{
    Path = path;
    CoverageObject = new CoverageData();
}