public Dictionary<string, string> GetSubResultsJsonByMethodName(string wttInputPath, string wttSingleRerunInputPath, string wttMultipleRerunInputPath)
{
    Dictionary<string, string> subResultsJsonByMethod = new Dictionary<string, string>();
    TestPass testPass = TestPass.ParseTestWttFileWithReruns(wttInputPath, wttSingleRerunInputPath, wttMultipleRerunInputPath, cleanupFailuresAreRegressions: true, truncateTestNames: false);

    foreach (var result in testPass.TestResults)
    {
        var methodName = result.Name.Substring(result.Name.LastIndexOf('.') + 1);

        if (!result.Passed)
        {
            // If the test failed but then passed on rerun, then we'll add metadata to report the results of each run.
            // Otherwise, we'll mark down the failure information.
            if (result.PassedOnRerun)
            {
                JsonSerializableTestResults serializableResults = new JsonSerializableTestResults();
                serializableResults.blobPrefix = helixResultsContainerUri;
                serializableResults.blobSuffix = helixResultsContainerRsas;

                List<string> errorList = new List<string>();
                errorList.Add(result.Details);

                foreach (TestResult rerunResult in result.RerunResults)
                {
                    errorList.Add(rerunResult.Details);
                }

                serializableResults.errors = errorList.Distinct().Where(s => s != null).ToArray();

                List<JsonSerializableTestResult> serializableResultList = new List<JsonSerializableTestResult>();
                serializableResultList.Add(ConvertToSerializableResult(result, serializableResults.errors));

                foreach (TestResult rerunResult in result.RerunResults)
                {
                    serializableResultList.Add(ConvertToSerializableResult(rerunResult, serializableResults.errors));
                }

                serializableResults.results = serializableResultList.ToArray();

                using (MemoryStream stream = new MemoryStream())
                {
                    DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(JsonSerializableTestResults));
                    serializer.WriteObject(stream, serializableResults);
                    stream.Position = 0;

                    using (StreamReader streamReader = new StreamReader(stream))
                    {
                        subResultsJsonByMethod.Add(methodName, streamReader.ReadToEnd());
                    }
                }
            }
        }
    }

    return subResultsJsonByMethod;
}
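// JsonSerializableTestResults, JsonSerializableTestResult, and ConvertToSerializableResult are
// referenced above but defined elsewhere in the project. The sketch below is a hedged
// reconstruction, not the canonical definitions: the member names blobPrefix, blobSuffix,
// errors, and results come from the usage above, while every member of JsonSerializableTestResult
// and the body of ConvertToSerializableResult are assumptions made purely for illustration.
// (Requires using System.Runtime.Serialization; for the DataContract attributes.)
[DataContract]
internal class JsonSerializableTestResults
{
    [DataMember] public string blobPrefix;
    [DataMember] public string blobSuffix;
    [DataMember] public string[] errors;
    [DataMember] public JsonSerializableTestResult[] results;
}

[DataContract]
internal class JsonSerializableTestResult
{
    [DataMember] public string outcome;    // assumed: "Passed" or "Failed" for this individual run
    [DataMember] public int duration;      // assumed: execution time in milliseconds
    [DataMember] public int errorIndex;    // assumed: index into the shared errors array, or -1 if none
}

private JsonSerializableTestResult ConvertToSerializableResult(TestResult result, string[] sharedErrors)
{
    // Assumed mapping: record the per-run outcome and duration, and refer to the run's error
    // text by its index in the de-duplicated errors array rather than repeating the string.
    return new JsonSerializableTestResult
    {
        outcome = result.Passed ? "Passed" : "Failed",
        duration = (int)result.ExecutionTime.TotalMilliseconds,
        errorIndex = result.Details != null ? Array.IndexOf(sharedErrors, result.Details) : -1,
    };
}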
public void ConvertWttLogToXUnitLog(string wttInputPath, string wttSingleRerunInputPath, string wttMultipleRerunInputPath, string xunitOutputPath)
{
    TestPass testPass = TestPass.ParseTestWttFileWithReruns(wttInputPath, wttSingleRerunInputPath, wttMultipleRerunInputPath, cleanupFailuresAreRegressions: true, truncateTestNames: false);
    var results = testPass.TestResults;

    int resultCount = results.Count;
    int passedCount = results.Where(r => r.Passed).Count();
    int passedOnRerunCount = results.Where(r => r.PassedOnRerun).Count();
    int failedCount = resultCount - passedCount;

    var root = new XElement("assemblies");

    var assembly = new XElement("assembly");
    assembly.SetAttributeValue("name", "MUXControls.Test.dll");
    assembly.SetAttributeValue("test-framework", "TAEF");
    assembly.SetAttributeValue("run-date", DateTime.Now.ToString("yyyy-MM-dd"));

    // This doesn't need to be completely accurate since it's not exposed anywhere.
    // If we need an accurate start time, we can probably calculate it from the te.wtl file,
    // but for now this is fine.
    assembly.SetAttributeValue("run-time", (DateTime.Now - testPass.TestPassExecutionTime).ToString("hh:mm:ss"));
    assembly.SetAttributeValue("total", resultCount);
    assembly.SetAttributeValue("passed", passedCount);
    assembly.SetAttributeValue("failed", failedCount);
    assembly.SetAttributeValue("skipped", passedOnRerunCount);
    assembly.SetAttributeValue("time", (int)testPass.TestPassExecutionTime.TotalSeconds);
    assembly.SetAttributeValue("errors", 0);
    root.Add(assembly);

    var collection = new XElement("collection");
    collection.SetAttributeValue("total", resultCount);
    collection.SetAttributeValue("passed", passedCount);
    collection.SetAttributeValue("failed", failedCount);
    collection.SetAttributeValue("skipped", passedOnRerunCount);
    collection.SetAttributeValue("name", "Test collection");
    collection.SetAttributeValue("time", (int)testPass.TestPassExecutionTime.TotalSeconds);
    assembly.Add(collection);

    foreach (var result in results)
    {
        var test = new XElement("test");
        test.SetAttributeValue("name", testNamePrefix + "." + result.Name);

        var className = result.Name.Substring(0, result.Name.LastIndexOf('.'));
        var methodName = result.Name.Substring(result.Name.LastIndexOf('.') + 1);
        test.SetAttributeValue("type", className);
        test.SetAttributeValue("method", methodName);
        test.SetAttributeValue("time", result.ExecutionTime.TotalSeconds);

        string resultString = string.Empty;

        if (result.Passed)
        {
            resultString = "Pass";
        }
        else if (result.PassedOnRerun)
        {
            resultString = "Skip";
        }
        else
        {
            resultString = "Fail";
        }

        test.SetAttributeValue("result", resultString);

        if (!result.Passed)
        {
            // If the test failed but then passed on rerun, then we'll add metadata to report the results of each run.
            // Otherwise, we'll mark down the failure information.
            if (result.PassedOnRerun)
            {
                // We'll save the subresults to a JSON text file that we'll upload to the helix results container -
                // this allows it to be as long as we want, whereas the reason field in Azure DevOps has a 4000 character limit.
                string subResultsFileName = methodName + "_subresults.json";
                string subResultsFilePath = Path.Combine(Path.GetDirectoryName(wttInputPath), subResultsFileName);

                var reason = new XElement("reason");
                reason.Add(new XCData(GetUploadedFileUrl(subResultsFileName, helixResultsContainerUri, helixResultsContainerRsas)));
                test.Add(reason);
            }
            else
            {
                var failure = new XElement("failure");
                failure.SetAttributeValue("exception-type", "Exception");

                var message = new XElement("message");

                StringBuilder errorMessage = new StringBuilder();
                errorMessage.AppendLine("Log: " + GetUploadedFileUrl(result.SourceWttFile, helixResultsContainerUri, helixResultsContainerRsas));
                errorMessage.AppendLine();

                if (result.Screenshots.Any())
                {
                    errorMessage.AppendLine("Screenshots:");

                    foreach (var screenshot in result.Screenshots)
                    {
                        errorMessage.AppendLine(GetUploadedFileUrl(screenshot, helixResultsContainerUri, helixResultsContainerRsas));
                        errorMessage.AppendLine();
                    }
                }

                errorMessage.AppendLine("Error Log: ");
                errorMessage.AppendLine(result.Details);

                message.Add(new XCData(errorMessage.ToString()));
                failure.Add(message);
                test.Add(failure);
            }
        }

        collection.Add(test);
    }

    File.WriteAllText(xunitOutputPath, root.ToString());
}
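// GetUploadedFileUrl is used throughout this section but not defined in it. A minimal sketch is
// given below, assuming the Helix results container URI acts as a URL prefix and the container
// RSAS token as a query-string suffix; the real implementation may differ in escaping or path
// handling, so treat this as illustrative only.
private static string GetUploadedFileUrl(string filePath, string containerUri, string containerRsas)
{
    // Uploaded artifacts are addressed by file name only, so strip any local directory portion,
    // then splice the name between the container URI and its shared-access signature.
    string fileName = Path.GetFileName(filePath);
    return string.Format("{0}/{1}{2}", containerUri, fileName, containerRsas);
}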
public void ConvertWttLogToXUnitLog(string wttInputPath, string wttSingleRerunInputPath, string wttMultipleRerunInputPath, string xunitOutputPath, int requiredPassRateThreshold)
{
    TestPass testPass = TestPass.ParseTestWttFileWithReruns(wttInputPath, wttSingleRerunInputPath, wttMultipleRerunInputPath, cleanupFailuresAreRegressions: true, truncateTestNames: false);
    var results = testPass.TestResults;

    int resultCount = results.Count;
    int passedCount = results.Where(r => r.Passed).Count();

    // Since we re-run tests on failure, we'll mark every test that failed at least once as "skipped" rather than "failed".
    // If the test failed often enough for it to count as a failed test (determined by a property on the
    // Azure DevOps job), we'll later mark it as failed during test results processing.
    int failedCount = results.Where(r => !r.PassedOrUnreliable(requiredPassRateThreshold)).Count();
    int skippedCount = results.Where(r => !r.Passed && r.PassedOrUnreliable(requiredPassRateThreshold)).Count();

    var root = new XElement("assemblies");

    var assembly = new XElement("assembly");
    assembly.SetAttributeValue("name", "MUXControls.Test.dll");
    assembly.SetAttributeValue("test-framework", "TAEF");
    assembly.SetAttributeValue("run-date", DateTime.Now.ToString("yyyy-MM-dd"));

    // This doesn't need to be completely accurate since it's not exposed anywhere.
    // If we need an accurate start time, we can probably calculate it from the te.wtl file,
    // but for now this is fine.
    assembly.SetAttributeValue("run-time", (DateTime.Now - testPass.TestPassExecutionTime).ToString("hh:mm:ss"));
    assembly.SetAttributeValue("total", resultCount);
    assembly.SetAttributeValue("passed", passedCount);
    assembly.SetAttributeValue("failed", failedCount);
    assembly.SetAttributeValue("skipped", skippedCount);
    assembly.SetAttributeValue("time", (int)testPass.TestPassExecutionTime.TotalSeconds);
    assembly.SetAttributeValue("errors", 0);
    root.Add(assembly);

    var collection = new XElement("collection");
    collection.SetAttributeValue("total", resultCount);
    collection.SetAttributeValue("passed", passedCount);
    collection.SetAttributeValue("failed", failedCount);
    collection.SetAttributeValue("skipped", skippedCount);
    collection.SetAttributeValue("name", "Test collection");
    collection.SetAttributeValue("time", (int)testPass.TestPassExecutionTime.TotalSeconds);
    assembly.Add(collection);

    foreach (var result in results)
    {
        var test = new XElement("test");
        test.SetAttributeValue("name", testNamePrefix + "." + result.Name);

        var className = GetTestClassName(result.Name);
        var methodName = GetTestMethodName(result.Name);
        test.SetAttributeValue("type", className);
        test.SetAttributeValue("method", methodName);
        test.SetAttributeValue("time", result.ExecutionTime.TotalSeconds);

        string resultString = string.Empty;

        if (result.Passed)
        {
            resultString = "Pass";
        }
        else if (result.PassedOrUnreliable(requiredPassRateThreshold))
        {
            resultString = "Skip";
        }
        else
        {
            resultString = "Fail";
        }

        test.SetAttributeValue("result", resultString);

        if (!result.Passed)
        {
            // If a test failed, we'll have rerun it multiple times.
            // We'll save the subresults to a JSON text file that we'll upload to the helix results container -
            // this allows it to be as long as we want, whereas the reason field in Azure DevOps has a 4000 character limit.
            string subResultsFileName = methodName + "_subresults.json";
            string subResultsFilePath = Path.Combine(Path.GetDirectoryName(wttInputPath), subResultsFileName);

            if (result.PassedOrUnreliable(requiredPassRateThreshold))
            {
                var reason = new XElement("reason");
                reason.Add(new XCData(GetUploadedFileUrl(subResultsFileName, helixResultsContainerUri, helixResultsContainerRsas)));
                test.Add(reason);
            }
            else
            {
                var failure = new XElement("failure");

                var message = new XElement("message");
                message.Add(new XCData(GetUploadedFileUrl(subResultsFileName, helixResultsContainerUri, helixResultsContainerRsas)));

                failure.Add(message);
                test.Add(failure);
            }
        }

        collection.Add(test);
    }

    File.WriteAllText(xunitOutputPath, root.ToString());
}
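// GetTestClassName and GetTestMethodName are used above but not defined in this section. The
// sketch below assumes they mirror the inline splitting done in the other overloads (everything
// before the last '.' is the class, everything after it is the method); treat it as an
// illustration rather than the canonical implementation.
private static string GetTestClassName(string testName)
{
    // e.g. "MUXControls.Test.Foo.BarTest" -> "MUXControls.Test.Foo"
    return testName.Substring(0, testName.LastIndexOf('.'));
}

private static string GetTestMethodName(string testName)
{
    // e.g. "MUXControls.Test.Foo.BarTest" -> "BarTest"
    return testName.Substring(testName.LastIndexOf('.') + 1);
}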
public void ConvertWttLogToXUnitLog(string wttInputPath, string wttSingleRerunInputPath, string wttMultipleRerunInputPath, string xunitOutputPath)
{
    TestPass testPass = TestPass.ParseTestWttFileWithReruns(wttInputPath, wttSingleRerunInputPath, wttMultipleRerunInputPath, cleanupFailuresAreRegressions: true, truncateTestNames: false);
    var results = testPass.TestResults;

    int resultCount = results.Count;
    int passedCount = results.Where(r => r.Passed).Count();
    int passedOnRerunCount = results.Where(r => r.PassedOnRerun).Count();
    int failedCount = resultCount - passedCount;

    var root = new XElement("assemblies");

    var assembly = new XElement("assembly");
    assembly.SetAttributeValue("name", "MUXControls.Test.dll");
    assembly.SetAttributeValue("test-framework", "TAEF");
    assembly.SetAttributeValue("run-date", DateTime.Now.ToString("yyyy-MM-dd"));

    // This doesn't need to be completely accurate since it's not exposed anywhere.
    // If we need an accurate start time, we can probably calculate it from the te.wtl file,
    // but for now this is fine.
    assembly.SetAttributeValue("run-time", (DateTime.Now - testPass.TestPassExecutionTime).ToString("hh:mm:ss"));
    assembly.SetAttributeValue("total", resultCount);
    assembly.SetAttributeValue("passed", passedCount);
    assembly.SetAttributeValue("failed", failedCount);
    assembly.SetAttributeValue("skipped", passedOnRerunCount);
    assembly.SetAttributeValue("time", (int)testPass.TestPassExecutionTime.TotalSeconds);
    assembly.SetAttributeValue("errors", 0);
    root.Add(assembly);

    var collection = new XElement("collection");
    collection.SetAttributeValue("total", resultCount);
    collection.SetAttributeValue("passed", passedCount);
    collection.SetAttributeValue("failed", failedCount);
    collection.SetAttributeValue("skipped", passedOnRerunCount);
    collection.SetAttributeValue("name", "Test collection");
    collection.SetAttributeValue("time", (int)testPass.TestPassExecutionTime.TotalSeconds);
    assembly.Add(collection);

    foreach (var result in results)
    {
        var test = new XElement("test");
        test.SetAttributeValue("name", testNamePrefix + "." + result.Name);

        var className = result.Name.Substring(0, result.Name.LastIndexOf('.'));
        var methodName = result.Name.Substring(result.Name.LastIndexOf('.') + 1);
        test.SetAttributeValue("type", className);
        test.SetAttributeValue("method", methodName);
        test.SetAttributeValue("time", result.ExecutionTime.TotalSeconds);

        string resultString = string.Empty;

        if (result.Passed)
        {
            resultString = "Pass";
        }
        else if (result.PassedOnRerun)
        {
            resultString = "Skip";
        }
        else
        {
            resultString = "Fail";
        }

        test.SetAttributeValue("result", resultString);

        if (!result.Passed)
        {
            // If the test failed but then passed on rerun, then we'll add metadata to report the results of each run.
            // Otherwise, we'll mark down the failure information.
            if (result.PassedOnRerun)
            {
                JsonSerializableTestResults serializableResults = new JsonSerializableTestResults();
                serializableResults.blobPrefix = helixResultsContainerUri;
                serializableResults.blobSuffix = helixResultsContainerRsas;

                List<string> errorList = new List<string>();
                errorList.Add(result.Details);

                foreach (TestResult rerunResult in result.RerunResults)
                {
                    errorList.Add(rerunResult.Details);
                }

                serializableResults.errors = errorList.Distinct().Where(s => s != null).ToArray();

                var reason = new XElement("reason");

                List<JsonSerializableTestResult> serializableResultList = new List<JsonSerializableTestResult>();
                serializableResultList.Add(ConvertToSerializableResult(result, serializableResults.errors));

                foreach (TestResult rerunResult in result.RerunResults)
                {
                    serializableResultList.Add(ConvertToSerializableResult(rerunResult, serializableResults.errors));
                }

                serializableResults.results = serializableResultList.ToArray();

                using (MemoryStream stream = new MemoryStream())
                {
                    DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(JsonSerializableTestResults));
                    serializer.WriteObject(stream, serializableResults);
                    stream.Position = 0;

                    using (StreamReader streamReader = new StreamReader(stream))
                    {
                        reason.Add(new XCData(streamReader.ReadToEnd()));
                    }
                }

                test.Add(reason);
            }
            else
            {
                var failure = new XElement("failure");
                failure.SetAttributeValue("exception-type", "Exception");

                var message = new XElement("message");

                StringBuilder errorMessage = new StringBuilder();
                errorMessage.AppendLine("Log: " + GetUploadedFileUrl(result.SourceWttFile, helixResultsContainerUri, helixResultsContainerRsas));
                errorMessage.AppendLine();

                if (result.Screenshots.Any())
                {
                    errorMessage.AppendLine("Screenshots:");

                    foreach (var screenshot in result.Screenshots)
                    {
                        errorMessage.AppendLine(GetUploadedFileUrl(screenshot, helixResultsContainerUri, helixResultsContainerRsas));
                        errorMessage.AppendLine();
                    }
                }

                errorMessage.AppendLine("Error Log: ");
                errorMessage.AppendLine(result.Details);

                message.Add(new XCData(errorMessage.ToString()));
                failure.Add(message);
                test.Add(failure);
            }
        }

        collection.Add(test);
    }

    File.WriteAllText(xunitOutputPath, root.ToString());
}
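// Hypothetical usage sketch: none of the names below come from this section. The containing type
// is referred to as TestResultParser for illustration only, the log file names are assumptions
// about what the test runner produces, and the pass-rate threshold is an arbitrary example value.
// helixResultsContainerUri, helixResultsContainerRsas, and testNamePrefix are assumed to be
// fields initialized elsewhere on that type.
public static void ConvertResultsForUpload(TestResultParser parser)
{
    // Assumed log file names for the original run and the two rerun passes.
    string wtl = "te.wtl";
    string singleRerunWtl = "te_rerun.wtl";
    string multipleRerunWtl = "te_rerun_multiple.wtl";

    // Emit the xUnit-format log that the Helix/Azure DevOps results processing will ingest.
    parser.ConvertWttLogToXUnitLog(wtl, singleRerunWtl, multipleRerunWtl, "testResults.xml", requiredPassRateThreshold: 100);

    // Write one subresults JSON file per unreliable test so the per-run details can be uploaded
    // alongside the xUnit log; the reason/message CDATA above points at these files by name.
    string outputDirectory = Path.GetDirectoryName(Path.GetFullPath(wtl));

    foreach (var pair in parser.GetSubResultsJsonByMethodName(wtl, singleRerunWtl, multipleRerunWtl))
    {
        File.WriteAllText(Path.Combine(outputDirectory, pair.Key + "_subresults.json"), pair.Value);
    }
}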