/// <summary>
/// Parses a VS2010 .trx results file and writes an HTML report next to it (&lt;fileName&gt;.htm).
/// Unsupported trx versions are reported to the console and skipped.
/// </summary>
/// <param name="fileName">Path of the .trx file to convert.</param>
internal static void GenerateReport(string fileName)
{
    VersionFinder v = new VersionFinder();
    SupportedFormats f = v.GetFileVersion(fileName);
    if (f != SupportedFormats.vs2010)
    {
        // Only the VS2010 trx schema is supported by the parser.
        // Fixed typo: "is not a recognized as" -> "is not recognized as".
        Console.WriteLine("File {0} is not recognized as a valid trx. Only VS2010 are supported", fileName);
    }
    else
    {
        Console.WriteLine("Processing {0} trx file", f.ToString());
        TrxParser parser = new TrxParser();
        TestRunResult r = parser.Parse(fileName);
        string html = new HtmlConverter(r).GetHtml();
        // Overwrites any existing report for the same trx file.
        using (TextWriter file = File.CreateText(fileName + ".htm"))
        {
            file.Write(html);
        }
        // Fixed typo: "Tranformation Succeed" -> "Transformation succeeded".
        Console.WriteLine("Transformation succeeded. OutputFile: " + fileName + ".htm\n");
    }
}
/// <summary>
/// Builds a <see cref="TestRunResult"/> from raw VsTest results: deduplicates the ran-test
/// list, detects a timeout, and aggregates failures, durations and error messages.
/// </summary>
/// <param name="testResults">Raw results of the completed test session.</param>
/// <param name="expectedTests">Number of tests expected to run; fewer distinct results implies a timeout.</param>
/// <param name="compressAll">When true and every discovered test ran, the ran set is compressed to "every test".</param>
private TestRunResult BuildTestRunResult(IRunResults testResults, int expectedTests, bool compressAll = true)
{
    var resultAsArray = testResults.TestResults.ToArray();
    // A test case can surface several times (e.g. data rows); count distinct ids only.
    var testCases = resultAsArray.Select(t => t.TestCase.Id).Distinct();
    var ranTestsCount = testCases.Count();
    // Fewer distinct results than expected with no explicit abort => the run timed out.
    var timeout = !_aborted && ranTestsCount < expectedTests;
    var ranTests = (compressAll && ranTestsCount >= DiscoverTests().Count) ? (ITestGuids)TestsGuidList.EveryTest() : new WrappedGuidsEnumeration(testCases);
    var failedTests = resultAsArray.Where(tr => tr.Outcome == TestOutcome.Failed).Select(t => t.TestCase.Id);
    if (ranTests.IsEmpty && (testResults.TestsInTimeout == null || testResults.TestsInTimeout.Count == 0))
    {
        _logger.LogTrace($"{RunnerId}: Initial Test session reports 0 result and 0 stuck tests.");
    }
    // Total duration is the sum of every individual test duration.
    var duration = testResults.TestResults.Aggregate(new TimeSpan(), (span, result) => span.Add(result.Duration));
    // One "<name>\n\n<error>" paragraph per test that produced an error message.
    var message = string.Join(Environment.NewLine, resultAsArray.Where(tr => !string.IsNullOrWhiteSpace(tr.ErrorMessage))
        .Select(tr => $"{tr.DisplayName}{Environment.NewLine}{Environment.NewLine}{tr.ErrorMessage}"));
    var failedTestsDescription = new WrappedGuidsEnumeration(failedTests);
    var timedOutTests = new WrappedGuidsEnumeration(testResults.TestsInTimeout?.Select(t => t.Id));
    return(timeout ? TestRunResult.TimedOut(ranTests, failedTestsDescription, timedOutTests, message, duration)
        : new TestRunResult(ranTests, failedTestsDescription, timedOutTests, message, duration));
}
/// <summary>
/// Runs the given test cases through VsTest and converts the raw results into a
/// <see cref="TestRunResult"/>.
/// </summary>
/// <param name="testCases">Tests to run; null falls back to every discovered test.</param>
/// <param name="timeoutMs">Optional per-run timeout forwarded to the run settings.</param>
/// <param name="envVars">Environment variables forwarded to the test host.</param>
/// <exception cref="OperationCanceledException">
/// Thrown when no test failed yet fewer results came back than test cases — the signature of a
/// timed-out session.
/// </exception>
private TestRunResult RunVsTest(IEnumerable<TestCase> testCases, int? timeoutMs, Dictionary<string, string> envVars)
{
    var testResults = RunAllTests(testCases, envVars, GenerateRunSettings(timeoutMs, false), false);
    // For now we need to throw an OperationCanceledException when a testrun has timed out.
    // We know the test run has timed out because we received less test results from the test run than there are test cases in the unit test project.
    var resultAsArray = testResults as TestResult[] ?? testResults.ToArray();
    // Use Length on the materialized array instead of the LINQ Count() extension (CA1829).
    if (resultAsArray.All(x => x.Outcome != TestOutcome.Failed) && resultAsArray.Length < (testCases ?? _discoveredTests).Count())
    {
        throw new OperationCanceledException();
    }
    var testResult = new TestRunResult
    {
        // Skipped tests do not count against success; only a hard failure does.
        Success = resultAsArray.All(tr => tr.Outcome == TestOutcome.Passed || tr.Outcome == TestOutcome.Skipped),
        FailingTests = resultAsArray.Where(tr => tr.Outcome == TestOutcome.Failed).Select(x => (TestDescription)x.TestCase).ToList(),
        ResultMessage = string.Join(
            Environment.NewLine,
            resultAsArray.Where(tr => !string.IsNullOrWhiteSpace(tr.ErrorMessage))
                .Select(tr => tr.ErrorMessage))
    };
    return testResult;
}
/// <summary>
/// Writes run-completion output: appends a "Testing done." marker to the log file, rewrites
/// the summary file, and — when there are failures — rewrites the failures file consumed by
/// build automation.
/// </summary>
/// <param name="runResults">Completed run whose counters and failures are reported.</param>
public void TestsCompleted(TestRunResult runResults)
{
    using (FileStream file = File.Open(_logFilename, FileMode.Append))
    {
        using (var writer = new StreamWriter(file))
        {
            writer.WriteLine("Testing done.");
            writer.WriteLine();
            writer.Flush();
        }
    }
    using (FileStream file = File.Open(_summaryFilename, FileMode.Create))
    {
        using (var writer = new StreamWriter(file))
        {
            // NOTE(review): this literal was corrupted in the original ("Pass: "******", ...",
            // which does not compile); reconstructed from the counters used by the sibling LCD
            // reporter — confirm the intended summary format.
            writer.WriteLine("Pass: " + runResults.PassedCount + ", Fail: " + runResults.FailedCount + " [" + runResults.DurationSummary + "]");
            writer.Flush();
        }
    }
    // write out failures file for build automation
    if (runResults.FailedCount > 0)
    {
        using (FileStream file = File.Open(_failuresFilename, FileMode.Create))
        {
            using (var writer = new StreamWriter(file))
            {
                foreach (TestCaseResult failure in runResults.Failures)
                {
                    writer.WriteLine(failure.Name);
                    writer.WriteLine(failure.ExceptionMessage);
                    writer.WriteLine();
                    writer.WriteLine("-----------------------");
                    writer.WriteLine();
                    writer.Flush();
                }
            }
        }
    }
}
/// <summary>Forwards the run-starting notification to every registered processor, in order.</summary>
public void TestsStarting(TestRunResult runResults)
{
    foreach (var processor in _processors)
    {
        processor.TestsStarting(runResults);
    }
}
/// <summary>
/// Renders the final run summary across the four LCD rows: banner, pass/fail counts, test
/// duration, and elapsed wall time.
/// </summary>
public void TestsCompleted(TestRunResult runResults)
{
    _lcd.WriteRow(0, "uTesting Complete");
    _lcd.WriteRow(1, $"{runResults.PassedCount} pass, {runResults.FailedCount} fail");
    _lcd.WriteRow(2, $"test {runResults.DurationSummary}");
    _lcd.WriteRow(3, $"elap {runResults.ElapsedSummary}");
}
/// <summary>Forwards the run-completed notification to every registered processor, in order.</summary>
public void TestsCompleted(TestRunResult runResults)
{
    foreach (var processor in _processors)
    {
        processor.TestsCompleted(runResults);
    }
}
/// <summary>
/// A run counts as successful only when nothing failed, was skipped, was ambiguous or was
/// undefined; the passed count itself is not consulted.
/// </summary>
public bool IsSuccess(TestRunResult testRunResult) =>
    testRunResult.Failed == 0
    && testRunResult.Skipped == 0
    && testRunResult.Ambiguous == 0
    && testRunResult.Undefined == 0;
/// <summary>
/// Marks a service test invalid after an invalid-workflow exception (no start node), persists
/// the test under the server identity, logs the failure, and serializes a TestInvalid run
/// result back onto the pending request.
/// </summary>
/// <param name="test">Test model to flag invalid and save.</param>
/// <param name="resourceId">Resource the test belongs to.</param>
/// <param name="serializer">Serializer used to build the request's ExecuteResult.</param>
/// <param name="failureMessage">Message surfaced as the invalid-test result message.</param>
private void SetTestRunResultAfterInvalidWorkflowException(IServiceTestModelTO test, Guid resourceId, Dev2JsonSerializer serializer, string failureMessage)
{
    // Force the test into the Invalid state; every other outcome flag is cleared first.
    test.TestFailing = false;
    test.TestPassed = false;
    test.TestPending = false;
    test.TestInvalid = true;
    test.LastRunDate = DateTime.Now;
    // The catalog save must run under the server identity, not the caller's.
    Common.Utilities.PerformActionInsideImpersonatedContext(Common.Utilities.ServerUser, () => { TestCatalog.Instance.SaveTest(resourceId, test); });
    var testRunResult = new TestRunResult { TestName = test.TestName };
    // TestInvalid was set true above; the guard is kept defensively in case SaveTest mutates it.
    if (test.TestInvalid)
    {
        testRunResult.RunTestResult = RunResult.TestInvalid;
        testRunResult.Message = failureMessage;
        Dev2Logger.Error($"Test {DataObject.TestName} for Resource {DataObject.ServiceName} ID {DataObject.ResourceID} marked invalid in exception for no start node", DataObject.ExecutionID.ToString());
    }
    // Attach any debug output captured while the test ran.
    testRunResult.DebugForTest = TestDebugMessageRepo.Instance.FetchDebugItems(resourceId, test.TestName);
    if (_request != null)
    {
        _request.ExecuteResult = serializer.SerializeToBuilder(testRunResult);
    }
}
/// <summary>
/// Aggregates a test step's child results into its final <see cref="TestRunResult"/>. Outside
/// debug mode the children are first updated as regular activities; in debug mode the debug
/// state matching this activity's UniqueID is folded into the result instead.
/// </summary>
private void UpdateTestStep(IDSFDataObject dataObject, IServiceTestStep serviceTestStep)
{
    if (!dataObject.IsDebugMode())
    {
        var serviceTestSteps = serviceTestStep.Children;
        foreach (var serviceTestTestStep in serviceTestSteps)
        {
            UpdateForRegularActivity(dataObject, serviceTestTestStep);
        }
    }
    var testRunResult = new TestRunResult();
    GetFinalTestRunResult(serviceTestStep, testRunResult);
    serviceTestStep.Result = testRunResult;
    if (dataObject.IsDebugMode())
    {
        var states = TestDebugMessageRepo.Instance.GetDebugItems(dataObject.ResourceID, dataObject.TestName);
        if (states != null)
        {
            // Narrow to the debug state emitted by this activity instance, if any.
            states = states.Where(state => state.ID == Guid.Parse(UniqueID)).ToList();
            var debugState = states.FirstOrDefault();
            if (debugState != null)
            {
                AddDebugItem(testRunResult, debugState);
            }
        }
    }
}
/// <summary>
/// Runs every discovered test, optionally with a single active mutation, and summarizes the
/// outcome.
/// </summary>
/// <param name="timeoutMS">Timeout in milliseconds forwarded to the run settings (0 when null).</param>
/// <param name="activeMutationId">Mutation to activate for this run, or null for a plain run.</param>
/// <exception cref="Exception">When test discovery has not happened yet.</exception>
/// <exception cref="OperationCanceledException">
/// When fewer results came back than discovered test cases — the signature of a timed-out run.
/// </exception>
public TestRunResult RunAll(int? timeoutMS, int? activeMutationId)
{
    if (_testCasesDiscovered is null)
    {
        throw new Exception("_testCasesDiscovered cannot be null when running tests");
    }
    // Materialize once: the sequence is consumed by the count check, the outcome scan and the
    // message aggregation below. (Also removes the dead initial TestRunResult assignment.)
    var testResults = RunAllTests(activeMutationId, GenerateRunSettings(timeoutMS ?? 0)).ToList();
    // For now we need to throw an OperationCanceledException when a testrun has timed out.
    // We know the testrun has timed out because we received less test results from the test run than there are test cases in the unit test project.
    if (testResults.Count < _testCasesDiscovered.Value)
    {
        throw new OperationCanceledException();
    }
    var testResult = new TestRunResult
    {
        Success = testResults.All(tr => tr.Outcome == TestOutcome.Passed),
        ResultMessage = string.Join(
            Environment.NewLine,
            testResults.Where(tr => !string.IsNullOrWhiteSpace(tr.ErrorMessage)).Select(tr => tr.ErrorMessage)),
        TotalNumberOfTests = _testCasesDiscovered.Value
    };
    return testResult;
}
/// <summary>
/// Appends a pass/fail summary item to the debug state's assertion results — unless any
/// existing result in that list already reports a pass.
/// </summary>
static void AddDebugItem(TestRunResult testRunResult, IDebugState debugState)
{
    var message = testRunResult.RunTestResult == RunResult.TestPassed
        ? Messages.Test_PassedResult
        : testRunResult.Message;
    var failed = testRunResult.RunTestResult == RunResult.TestFailed;
    var staticParams = new DebugItemServiceTestStaticDataParams(message, failed);
    var summaryItem = new DebugItem();
    summaryItem.AddRange(staticParams.GetDebugItemResult());
    if (debugState.AssertResultList != null)
    {
        // Skip the summary when a passing result is already present anywhere in the list.
        var alreadyPassed = debugState.AssertResultList
            .SelectMany(debugItem => debugItem.ResultsList)
            .Any(debugItemResult => debugItemResult.Value == Messages.Test_PassedResult);
        if (!alreadyPassed)
        {
            debugState.AssertResultList.Add(summaryItem);
        }
    }
}
private TestRunResult RunTestSession(IList <Mutant> mutantsToTest, int timeoutMs, TestUpdateHandler updateHandler, bool forceSingle) { TestRunResult result = null; Logger.LogTrace($"Testing {string.Join(" ,", mutantsToTest.Select(x => x.DisplayName))}."); if (TestRunner is IMultiTestRunner multi && !forceSingle) { result = multi.TestMultipleMutants(timeoutMs, mutantsToTest.ToList(), updateHandler); }
/// <summary>
/// Borrows a runner from the pool, delegates the run, and guarantees the runner is returned on
/// every exit path. The original only returned it on success, so any exception (e.g. the
/// timeout OperationCanceledException) leaked the runner out of the pool.
/// </summary>
/// <param name="timeoutMS">Timeout forwarded to the underlying runner.</param>
/// <param name="activeMutationId">Mutation forwarded to the underlying runner.</param>
public TestRunResult RunAll(int? timeoutMS, int? activeMutationId)
{
    var runner = TakeRunner();
    try
    {
        return runner.RunAll(timeoutMS, activeMutationId);
    }
    finally
    {
        ReturnRunner(runner);
    }
}
/// <summary>
/// Projects a <see cref="TestRunResult"/> onto its view model, resolving the Selenium command
/// that produced it from the supplied test info.
/// </summary>
/// <param name="testRunResult">Result to project; accessed unconditionally, so must not be null.</param>
/// <param name="testInfo">Test info whose commands are searched by the result's command guid.</param>
public TestRunResultViewModel(TestRunResult testRunResult, TestInfo testInfo)
{
    SeleniumCommand = testInfo.SeleniumCommands.FirstOrDefault(x => x.Guid == testRunResult.CommandTestGuid);
    CommandTestGuid = testRunResult.CommandTestGuid;
    RunTime = testRunResult.RunTime;
    // The original mixed unconditional access with "?."-guards on the same argument and used a
    // redundant "x != null ? x?.Path : null" ternary; a screenshot is simply optional, so one
    // null-conditional chain expresses the same value.
    ScreenshotUrl = testRunResult.Screenshot?.Path;
    IsSuccesful = testRunResult.IsSuccesful;
    CreatedBy = testRunResult.CreatedBy;
    DateAdded = testRunResult.DateAdded;
}
/// <summary>
/// Finds the service-test step bound to this activity (by ActivityID) and stores its final
/// aggregated run result; does nothing when no step matches.
/// </summary>
void GetTestOutputForBrowserExecution(IDSFDataObject dataObject)
{
    var matchingStep = dataObject.ServiceTest?.TestSteps?.FirstOrDefault(step => step.ActivityID == Guid.Parse(UniqueID));
    if (matchingStep == null)
    {
        return;
    }
    var result = new TestRunResult();
    GetFinalTestRunResult(matchingStep, result, dataObject);
    matchingStep.Result = result;
}
/// <summary>
/// Builds a mocked <see cref="IServiceTestModelTO"/> carrying one failed assert step and three
/// outputs (pending, failed, invalid), all reported under test name "Test 1".
/// </summary>
static Mock<IServiceTestModelTO> SetupServiceTestSteps()
{
    // Local factory keeps the four results structurally uniform.
    TestRunResult MakeResult(RunResult outcome, string message) => new TestRunResult
    {
        RunTestResult = outcome,
        Message = message,
        TestName = "Test 1",
    };

    var failingStep = new Mock<IServiceTestStep>();
    failingStep.Setup(step => step.Result).Returns(MakeResult(RunResult.TestFailed, "Test Failed because of some reasons"));
    failingStep.Setup(step => step.Type).Returns(StepType.Assert);

    var pendingOutput = new Mock<IServiceTestOutput>();
    pendingOutput.Setup(output => output.Result).Returns(MakeResult(RunResult.TestPending, "This test is still pending"));
    var failingOutput = new Mock<IServiceTestOutput>();
    failingOutput.Setup(output => output.Result).Returns(MakeResult(RunResult.TestFailed, "This test has failed"));
    var invalidOutput = new Mock<IServiceTestOutput>();
    invalidOutput.Setup(output => output.Result).Returns(MakeResult(RunResult.TestInvalid, "This test is invalid"));

    var serviceTestModelTO = new Mock<IServiceTestModelTO>();
    serviceTestModelTO.Setup(to => to.TestSteps).Returns(new List<IServiceTestStep> { failingStep.Object });
    serviceTestModelTO.Setup(to => to.Outputs).Returns(new List<IServiceTestOutput> { pendingOutput.Object, failingOutput.Object, invalidOutput.Object });
    return serviceTestModelTO;
}
/// <summary>
/// Resets the display for a new run: releases any previous run's results, clears the LCD and
/// shows the in-progress banner.
/// </summary>
public void TestsStarting(TestRunResult runResults)
{
    _runResults?.Dispose();
    _runResults = runResults;
    _lcd.ClearScreen();
    _lcd.Write("Testing...");
    _lastTestLineCount = 0;
}
/// <summary>Verifies that ResponseTime is derived as EndTime - StartTime.</summary>
public void ResponseTime()
{
    var result = new TestRunResult { StartTime = DateTime.UtcNow };
    // Guarantee a measurable, non-zero interval between the two timestamps.
    Thread.Sleep(1);
    result.EndTime = DateTime.UtcNow;

    var expected = result.EndTime - result.StartTime;
    Assert.AreEqual(expected, result.ResponseTime);
}
/// <summary>
/// Marshals the completion summary (pass/fail counts and run/CUT timings) onto the UI thread
/// via the dispatcher.
/// </summary>
public void TestsCompleted(TestRunResult runResults)
{
    Dispatcher.BeginInvoke(_ =>
    {
        _currentTest.TextContent = " [Complete]";
        _pass.TextContent = _runResults.PassedCount.ToString();
        _fail.TextContent = _runResults.FailedCount.ToString();
        var started = _runResults.RunStarted.ToString("hh:mm:ss");
        var finished = _runResults.RunFinished.ToString("hh:mm:ss");
        _duration.TextContent = " Run [" + started + " - " + finished + "] - CUT [" + _runResults.DurationSummary + "]";
        return null;
    }, null);
}
/// <summary>
/// Stores the new run, resets failure tracking, and marshals the counter/panel reset onto the
/// UI thread via the dispatcher.
/// </summary>
public void TestsStarting(TestRunResult runResults)
{
    _runResults = runResults;
    _failureScrollIndex = _failureCount = 0;
    Dispatcher.BeginInvoke(_ =>
    {
        _currentTest.TextContent = " [Queued]";
        _pass.TextContent = "0";
        _fail.TextContent = "0";
        _duration.TextContent = " Started [" + _runResults.RunStarted.ToString("hh:mm:ss") + "]";
        _failurePanel.Children.Clear();
        return null;
    }, null);
}
/// <summary>
/// Asserts the parsed metrics of the "trx2html.Test.AllFailed" class: 3 failures, no passes,
/// none ignored, 0%, status "Failed". Assertion messages are kept in the original Spanish.
/// </summary>
private static void AssertAllFailed(TestRunResult result)
{
    TestClassRun classRun = result.TestClassList.First(t => t.Name == "trx2html.Test.AllFailed");
    Assert.AreEqual("trx2html.Test.AllFailed", classRun.Name, "No coincide el nombre del TestClass");
    //Assert.AreEqual("trx2html.Test.AllFailed, trx2html.Test, Version=0.0.4.0, Culture=neutral, PublicKeyToken=null",
    //                classRun.FullName, "No coincide el nombre del TestClass");
    Assert.AreEqual(TimeSpan.Parse("00:00:00.1596216"), classRun.Duration, "No se ha calculado la duración");
    Assert.AreEqual(3, classRun.Failed, "No se ha calculado los fallos");
    Assert.AreEqual(0, classRun.Ignored, "No se ha calculado los ignorados");
    Assert.AreEqual(0, classRun.Percent, "No se ha calculado El %");
    Assert.AreEqual("Failed", classRun.Status, "No se ha calculado el status");
    Assert.AreEqual(0, classRun.Success, "No se ha calculado el exito");
    Assert.AreEqual("trx2html.Test, Version=0.0.4.0, Culture=neutral, PublicKeyToken=null", classRun.AssemblyName.FullName);
}
/// <summary>
/// Executes every child activity of this container, dispatching debug states around the run.
/// During service-test execution, assertions are evaluated after each child and the matching
/// test step's aggregated result is stored at the end.
/// </summary>
protected override void ExecuteTool(IDSFDataObject dataObject, int update)
{
    _previousParentID = dataObject.ParentInstanceID;
    InitializeDebug(dataObject);
    dataObject.ForEachNestingLevel++;
    if (dataObject.IsDebugMode())
    {
        DispatchDebugState(dataObject, StateType.Before, update);
    }
    // Children report this container as their parent while they execute.
    dataObject.ParentInstanceID = UniqueID;
    dataObject.IsDebugNested = true;
    if (dataObject.IsDebugMode())
    {
        DispatchDebugState(dataObject, StateType.After, update);
    }
    // The test-step tree is flattened so the step can be matched at any nesting depth.
    var serviceTestStep = dataObject.ServiceTest?.TestSteps?.Flatten(step => step.Children)?.FirstOrDefault(step => step.UniqueId == _originalUniqueID);
    var serviceTestSteps = serviceTestStep?.Children;
    foreach (var dsfActivity in Activities)
    {
        var act = dsfActivity as IDev2Activity;
        if (act != null)
        {
            act.Execute(dataObject, update);
            if (dataObject.IsServiceTestExecution)
            {
                UpdateDebugStateWithAssertions(dataObject, serviceTestSteps?.ToList(), Guid.Parse(act.UniqueID));
            }
        }
    }
    if (dataObject.IsServiceTestExecution)
    {
        if (serviceTestStep != null)
        {
            var testRunResult = new TestRunResult();
            GetFinalTestRunResult(serviceTestStep, testRunResult);
            serviceTestStep.Result = testRunResult;
        }
    }
    OnCompleted(dataObject);
    if (dataObject.IsDebugMode())
    {
        _debugOutputs = new List<DebugItem>();
        DispatchDebugState(dataObject, StateType.Duration, update);
    }
}
/// <summary>
/// Asserts that <paramref name="predicate"/> holds for every element of
/// <paramref name="collection"/>, recording the outcome (with caller name/line) in the test
/// runner.
/// </summary>
/// <param name="collection">Elements to check; enumerated exactly once.</param>
/// <param name="predicate">Condition each element must satisfy.</param>
/// <param name="callerName">Filled in by the compiler with the calling member's name.</param>
/// <param name="callerLine">Filled in by the compiler with the call-site line number.</param>
public static void All<T>(IEnumerable<T> collection, Func<T, bool> predicate, [CallerMemberName] string callerName = "", [CallerLineNumber] int callerLine = 0)
{
    bool res = collection.All(predicate);
    var tr = new TestRunResult()
    {
        CallerName = callerName,
        CallerLine = callerLine.ToString(),
        Success = res,
        Assertion = "All<T>"
    };
    if (!res)
    {
        // Populate the failure message BEFORE handing the result over: the original set it
        // after AddResult, so any runner that snapshots or publishes the result immediately
        // would have reported a failure with no message.
        tr.ErrorMessage = "Condition is not met for all members";
    }
    TestRunner.AddResult(tr);
}
#pragma warning disable S1541 // Methods and properties should not be too complex
/// <summary>
/// Executes every child activity of this container, dispatching debug states around the run.
/// During service-test execution the original unique id is captured once on first entry —
/// presumably so the step can still be matched if UniqueID changes later (TODO confirm) — and
/// the matching step's aggregated result is stored at the end.
/// </summary>
protected override void ExecuteTool(IDSFDataObject dataObject, int update)
#pragma warning restore S1541 // Methods and properties should not be too complex
{
    _previousParentID = dataObject.ParentInstanceID;
    InitializeDebug(dataObject);
    dataObject.ForEachNestingLevel++;
    if (dataObject.IsDebugMode())
    {
        DispatchDebugState(dataObject, StateType.Before, update);
    }
    // Children report this container as their parent while they execute.
    dataObject.ParentInstanceID = UniqueID;
    dataObject.IsDebugNested = true;
    if (dataObject.IsDebugMode())
    {
        DispatchDebugState(dataObject, StateType.After, update);
    }
    // Capture the original id exactly once per test execution.
    if (dataObject.IsServiceTestExecution && _originalUniqueID == Guid.Empty)
    {
        _originalUniqueID = Guid.Parse(UniqueID);
    }
    // The test-step tree is flattened so the step can be matched at any nesting depth.
    var serviceTestStep = dataObject.ServiceTest?.TestSteps?.Flatten(step => step.Children)?.FirstOrDefault(step => step.UniqueId == _originalUniqueID);
    var serviceTestSteps = serviceTestStep?.Children;
    foreach (var dsfActivity in Activities)
    {
        if (dsfActivity is IDev2Activity act)
        {
            ExecuteActivity(dataObject, update, serviceTestSteps, dsfActivity, act);
        }
    }
    if (dataObject.IsServiceTestExecution && serviceTestStep != null)
    {
        var testRunResult = new TestRunResult();
        GetFinalTestRunResult(serviceTestStep, testRunResult);
        serviceTestStep.Result = testRunResult;
    }
    OnCompleted(dataObject);
    if (dataObject.IsDebugMode())
    {
        _debugOutputs = new List<DebugItem>();
        DispatchDebugState(dataObject, StateType.Duration, update);
    }
}
/// <summary>
/// Non-strict success calculation: verifies IsSuccess returns true for the supplied outcome
/// counts (total is the sum of all five).
/// </summary>
public void IsSuccess_SuccessData_ShouldReturnTrue(int passed, int failed, int skipped, int ambiguous, int undefined)
{
    // ARRANGE
    var calculator = new NonStrictTestRunResultSuccessCalculator();
    var total = passed + failed + skipped + ambiguous + undefined;
    var testRunResult = new TestRunResult(total, passed, failed, skipped, ambiguous, undefined);

    // ACT
    bool isSuccess = calculator.IsSuccess(testRunResult);

    // ASSERT
    isSuccess.Should().BeTrue();
}
/// <summary>
/// During service-test execution, evaluates assertions against this activity's child steps and
/// stores the aggregated result on the matching step. Always returns the matching step (which
/// may be null).
/// </summary>
private IServiceTestStep HandleServiceTestExecution(IDSFDataObject dataObject)
{
    var matchedStep = dataObject.ServiceTest?.TestSteps?.Flatten(step => step.Children)?.FirstOrDefault(step => step.ActivityID == _originalUniqueID);
    if (!dataObject.IsServiceTestExecution)
    {
        return matchedStep;
    }
    UpdateDebugStateWithAssertions(dataObject, matchedStep?.Children?.ToList());
    if (matchedStep != null)
    {
        var aggregated = new TestRunResult();
        GetFinalTestRunResult(matchedStep, aggregated);
        matchedStep.Result = aggregated;
    }
    return matchedStep;
}
/// <summary>
/// A timed-out run result must translate into MutantStatus.Timeout, and the configured timeout
/// value must reach the runner unchanged.
/// </summary>
public void MutationTestExecutor_TimeoutShouldBePassedToProcessTimeout()
{
    var runnerMock = new Mock<ITestRunner>(MockBehavior.Strict);
    var mutant = new Mutant { Id = 1, MustRunAgainstAllTests = true };
    var timedOutResult = TestRunResult.TimedOut(
        TestListDescription.NoTest(),
        TestListDescription.NoTest(),
        TestListDescription.EveryTest(),
        "");
    runnerMock.Setup(x => x.RunAll(It.IsAny<int>(), mutant, null)).Returns(timedOutResult);

    var target = new MutationTestExecutor(runnerMock.Object);
    target.Test(new List<Mutant> { mutant }, 1999, null);

    mutant.ResultStatus.ShouldBe(MutantStatus.Timeout);
    runnerMock.Verify(x => x.RunAll(1999, mutant, null), Times.Once);
}
/// <summary>
/// Aggregates child-step results into <paramref name="testRunResult"/>: passes only when every
/// non-mock child passed; otherwise fails and joins the non-passing messages. A step with no
/// child collection at all is marked failed without a message (preserving existing behavior).
/// </summary>
private static void GetFinalTestRunResult(IServiceTestStep serviceTestStep, TestRunResult testRunResult)
{
    var nonPassingSteps = serviceTestStep.Children?.Where(step => step.Type != StepType.Mock && step.Result?.RunTestResult != RunResult.TestPassed).ToList();
    if (nonPassingSteps == null)
    {
        // No children collection: treated as a failure, with no message, as before.
        testRunResult.RunTestResult = RunResult.TestFailed;
        return;
    }
    if (nonPassingSteps.Count == 0)
    {
        testRunResult.Message = Messages.Test_PassedResult;
        testRunResult.RunTestResult = RunResult.TestPassed;
        return;
    }
    testRunResult.Message = string.Join(Environment.NewLine, nonPassingSteps.Select(step => step.Result.Message));
    testRunResult.RunTestResult = RunResult.TestFailed;
}
/// <summary>
/// Borrows a runner from the pool and delegates the run, returning the runner on every exit
/// path. The original only returned it on success or OperationCanceledException, so any other
/// exception leaked the runner out of the pool; try/finally covers all cases uniformly.
/// </summary>
/// <param name="timeoutMS">Timeout forwarded to the underlying runner.</param>
/// <param name="activeMutationId">Mutation forwarded to the underlying runner.</param>
public TestRunResult RunAll(int? timeoutMS, int? activeMutationId)
{
    var runner = TakeRunner();
    try
    {
        return runner.RunAll(timeoutMS, activeMutationId);
    }
    finally
    {
        ReturnRunner(runner);
    }
}
/// <summary>
/// Appends the "TOP 5 Slower Methods" table (method, status, duration) to the HTML report.
/// </summary>
/// <param name="run">Parsed run providing <c>TopSlowerMethods</c>.</param>
/// <param name="result">Report buffer the table is appended to.</param>
private void WriteSlowerMethods(TestRunResult run, StringBuilder result)
{
    result.Append("<a name='slower' /><h5>TOP 5 Slower Methods</h5><table id=\"tSlowerMethods\" border='0' width='900px'>" +
                  "<tr><th>TestMethod</th><th colspan='2'>Status</th><th>Duration</th></tr>");
    foreach (var m in run.TopSlowerMethods)
    {
        // Row rendering is delegated so the cell layout stays consistent with the detail
        // section. (Removed a stale commented-out AppendFormat that duplicated this.)
        WriteTestMethodResult(m, result);
    }
    result.Append("</table>");
}
/// <summary>
/// Appends the main totals table (percent, status bar, counts, time taken) to the report.
/// </summary>
private void WriteSummary(TestRunResult run, StringBuilder result)
{
    // The status cell embeds the graphical pass/fail bar.
    var statusBar = CreateHtmlBars(run);
    result.AppendFormat(
        @"<table id='tMainSummary' border='0' width='900px'> <tr> <th>Percent</th> <th>Status</th> <th>TotalTests</th> <th>Passed</th> <th>Failed</th> <th>Inconclusive</th> <th>TimeTaken</th> </tr> <tr> <td>{0}%</td> <td width='350px' style='vertical-align:middle;font-size:200%'>{1}</td> <td>{2}</td> <td>{3}</td> <td>{4}</td> <td>{5}</td> <td>{6}</td> </tr> </table> <br />",
        run.TotalPercent,
        statusBar,
        run.TotalMethods,
        run.Passed,
        run.Failed,
        run.Inconclusive,
        run.TimeTaken);
}
/// <summary>
/// Appends the per-class summary table. Rows whose Status is "Succeed" start hidden and can be
/// revealed via showAllTestClassesSummary(); failed classes are visible by default.
/// </summary>
private void WriteSummaryDetails(TestRunResult run, StringBuilder result)
{
    result.Append(@"<table id='tSummaryDetail' border='0' width='900px' > <tr> <th colspan='7'>Failed TestClasses Summary (<a href='#' onclick='showAllTestClassesSummary()'> Show All</a> )</th> </tr> <tr> <th>Class Name</th> <th>Percent</th> <th>Status</th> <th>TestsPassed</th> <th>TestsFailed</th> <th>TestsIgnored</th> <th>Duration</th> </tr>");
    foreach (var testClass in run.TestClassList)
    {
        var display = testClass.Status == "Succeed" ? "none" : "block";
        // One composite format per row instead of nine separate calls; output is identical.
        result.AppendFormat(
            "<tr style=\"display:{0}\" ><td><a href='#{1}'>{2}</a></td><td>{3}%</td><td width=\"80px\">{4}</td><td>{5}</td><td>{6}</td><td>{7}</td><td>{8}</td></tr>\r\n",
            display,
            testClass.Name,
            testClass.Name,
            testClass.Percent,
            CreateHtmlBars(testClass),
            testClass.Success,
            testClass.Failed,
            testClass.Ignored,
            testClass.Duration);
    }
    result.Append("</table>");
}
/// <summary>
/// Appends the report body: totals, per-class summary, slowest methods, then detailed results
/// grouped by test class.
/// </summary>
private void WriteBody(TestRunResult run, StringBuilder result)
{
    WriteSummary(run, result);
    WriteSummaryDetails(run, result);
    WriteSlowerMethods(run, result);
    foreach (var methodsOfClass in run.TestMethodRunList.GroupBy(m => m.TestClass))
    {
        var classRun = run.TestClassList.First(tc => tc.FullName == methodsOfClass.Key);
        WriteClassResult(classRun, result);
    }
}
/// <summary>
/// Appends the HTML head (inline CSS plus the togle/showAllTestClassesSummary scripts), the
/// page title, and the top navigation links down to the 'totals' anchor.
/// </summary>
private void WriteHeader(TestRunResult run, StringBuilder result)
{
    result.AppendFormat("<html><head><title>{0}</title>", run.Name);
    //result.Append("<link type='text/css' rel='StyleSheet' href='trx2html.css' />");
    result.Append("<style type='text/css'>")
          .Append(GetCss())
          .Append("</style>")
          .Append("<script type=\"text/javascript\">")
          .Append(@" function togle(anId){ var el = document.getElementById(anId); if (el!=null){ if (el.style.display=='none'){ el.style.display='block'; }else{ el.style.display='none'; } } } function showAllTestClassesSummary(){ var tSummaryDetails = document.getElementById('tSummaryDetail'); for (var i=0;i<tSummaryDetails.rows.length;i++){ r = tSummaryDetails.rows[i]; r.style.display='block'; } } ")
          .Append("</script>")
          .Append("</head><body>")
          .Append("<a name='__top' />");
    result.AppendFormat(@"<h3>{0}</h3> <div class='contents'> <a href='#totals'>Totals</a> | <a href='#summary'>Summary</a> | <a href='#slower'>Slowers</a> | <a href='#detail'>Detail</a> | <a href='#envInfo'>Environment Information</a> </div> <br /> <a name='totals' />", run.Name);
}
/// <summary>Appends the credit footer and closes the HTML document.</summary>
private void WriteFooter(TestRunResult run, StringBuilder result)
{
    const string credit =
        "<hr style=\"border-style:dotted;color:#dcdcdc\"/>" +
        "<i style=\"width:100%;font:10pt Verdana;text-align:center;background-color:#dcdcdc\">" +
        "The VSTS Test Results HTML Viewer. (c) <a href=\"http://blogs.msdn.com/rido\">rido</a>'2011" +
        "</i>";
    result.Append(credit).Append("</body></html>");
}
/// <summary>Captures the parsed run that GetHtml() will later render.</summary>
public HtmlConverter(TestRunResult testRunResult) => run = testRunResult;
/// <summary>
/// Appends the "TestRun Environment Information" table: machine name, test assemblies, user
/// name, and the source TRX file name.
/// </summary>
private void WriteEnvironmentInfo(TestRunResult run, StringBuilder result)
{
    result.Append("<br /><a name=\"envInfo\" /><table width='900px'><tr><th colspan=\"2\">TestRun Environment Information</th></tr>");
    // Only the first computer is reported, matching the single MachineName row.
    result.AppendFormat("<tr><th align=\"right\">MachineName</th><td>{0}</td></tr>", run.Computers.First());
    result.Append("<tr><th align=\"right\">TestAssemblies</th><td>");
    foreach (var assembly in run.Assemblies)
    {
        result.Append(assembly.FullName).Append(" <br />");
    }
    result.Append("</td></tr>")
          .AppendFormat("<tr><th align=\"right\">UserName</th><td>{0}</td></tr>", run.UserName)
          .AppendFormat("<tr><th align=\"right\">TRX File</th><td>{0}</td></tr>", run.Name)
          .Append("</table>");
}