/// <summary>Gets the color to use when displaying the specified type of results.</summary>
/// <param name="resultType">The result category being displayed.</param>
/// <returns>The <see cref="Color"/> associated with <paramref name="resultType"/>.</returns>
/// <exception cref="NotImplementedException">Thrown when the result type is not recognized.</exception>
internal static Color GetDisplayColor(this TestResultType resultType)
{
    switch (resultType)
    {
        case TestResultType.Passed:
            return Color.DarkGreen;

        // Concurrency-specific failures get a dark red to distinguish them from hard errors.
        case TestResultType.DataRace:
        case TestResultType.Deadlock:
        case TestResultType.Livelock:
            return Color.FromArgb(192, 0, 0);

        case TestResultType.Inconclusive:
        case TestResultType.ResultInconclusive:
            return Color.Wheat;

        case TestResultType.Error:
        case TestResultType.AssertFailure:
        case TestResultType.ResultAssertFailure:
        case TestResultType.Exception:
        case TestResultType.RegressionAssertFailure:
            return Color.Red;

        default:
            throw new NotImplementedException("The mchess result label is not recognized: " + resultType);
    }
}
/// <summary>
/// Changes the node's icon based on the specified test result.
/// Unrecognized results leave the current icon unchanged.
/// </summary>
/// <param name="result">The test result that determines which icon to show.</param>
protected void UpdateImageListIndex(TestResultType result)
{
    int oldIndex = ImageIndex;

    switch (result)
    {
        case TestResultType.Failure:
            ImageIndex = (int)TestTreeViewImageListIndex.TestFailed;
            break;
        case TestResultType.Success:
            ImageIndex = (int)TestTreeViewImageListIndex.TestPassed;
            break;
        case TestResultType.Ignored:
            ImageIndex = (int)TestTreeViewImageListIndex.TestIgnored;
            break;
        case TestResultType.None:
            ImageIndex = (int)TestTreeViewImageListIndex.TestNotRun;
            break;
    }

    // Keep the selected-state icon in sync with the normal icon.
    SelectedImageIndex = ImageIndex;

    // Only raise the change notification when the icon actually changed.
    if (oldIndex != ImageIndex)
    {
        OnImageIndexChanged();
    }
}
/// <summary>
/// Maps a test result type onto the console color used to report it.
/// </summary>
/// <param name="resultType">The result type to map.</param>
/// <returns>The console color for the given result type.</returns>
/// <exception cref="NotImplementedException">Thrown for unrecognized result types.</exception>
private static ConsoleColor ResultTypeToConsoleColor(TestResultType resultType)
{
    switch (resultType)
    {
        // Success
        case TestResultType.Passed:
            return ConsoleColor.Green;

        // Inconclusive
        case TestResultType.Inconclusive:
        case TestResultType.ResultInconclusive:
            return InconclusiveColor;

        // Soft errors (concurrency issues)
        case TestResultType.DataRace:
        case TestResultType.Deadlock:
        case TestResultType.Livelock:
            return ConsoleColor.DarkRed;

        // Hard errors
        case TestResultType.Error:
        case TestResultType.Exception:
        case TestResultType.AssertFailure:
        case TestResultType.ResultAssertFailure:
        case TestResultType.RegressionAssertFailure:
            return ErrorColor;

        default:
            throw new NotImplementedException("Result type not implemented: " + resultType);
    }
}
/// <summary>
/// Asserts BoostTestResult against the expected details
/// </summary>
/// <param name="testResult">The BoostTestResult to test</param>
/// <param name="parentTestResult">The expected parent BoostTestResult of testResult</param>
/// <param name="name">The expected TestCase display name</param>
/// <param name="result">The expected TestCase execution result</param>
/// <param name="assertionsPassed">The expected number of passed assertions (e.g. BOOST_CHECKS)</param>
/// <param name="assertionsFailed">The expected number of failed assertions (e.g. BOOST_CHECKS, BOOST_REQUIRE, BOOST_FAIL etc.)</param>
/// <param name="expectedFailures">The expected number of expected test failures</param>
private void AssertReportDetails(
    BoostTestResult testResult,
    BoostTestResult parentTestResult,
    string name,
    TestResultType result,
    uint assertionsPassed,
    uint assertionsFailed,
    uint expectedFailures
)
{
    Assert.That(testResult.Unit.Name, Is.EqualTo(name));

    if (parentTestResult == null)
    {
        Assert.That(testResult.Unit.Parent, Is.Null);
    }
    else
    {
        // FIX: NUnit's constraint model takes the actual value first and the expected value
        // inside the constraint; the original had the two reversed, which produces a
        // misleading "Expected/But was" message on failure.
        Assert.That(testResult.Unit.Parent, Is.EqualTo(parentTestResult.Unit));
    }

    Assert.That(testResult.Result, Is.EqualTo(result));
    Assert.That(testResult.AssertionsPassed, Is.EqualTo(assertionsPassed));
    Assert.That(testResult.AssertionsFailed, Is.EqualTo(assertionsFailed));
    Assert.That(testResult.ExpectedFailures, Is.EqualTo(expectedFailures));
}
/// <summary>
/// Creates a sub-test record with its name, outcome, message, and ordinal position.
/// </summary>
/// <param name="name">Display name of the sub-test.</param>
/// <param name="testResultType">Outcome of the sub-test.</param>
/// <param name="message">Failure/skip message, if any.</param>
/// <param name="subTestNumber">Zero-based position of this sub-test within its parent.</param>
public SubTest(string name, TestResultType testResultType, string message, int subTestNumber)
{
    this.Name = name;
    this.TestResultType = testResultType;
    this.Message = message;
    this.SubTestNumber = subTestNumber;
}
/// <summary>
/// Builds the job-data map carrying the execution result (as its integer code)
/// and the expected exception message.
/// </summary>
/// <param name="resultType">Result to encode under the ExecutionResult key.</param>
/// <param name="expectedExceptionMessage">Message stored under the ExecutionMessage key.</param>
/// <returns>A dictionary with the two job attributes populated.</returns>
private Dictionary<string, string> GetJobDataMapDictionary(TestResultType resultType, string expectedExceptionMessage)
{
    var jobData = new Dictionary<string, string>();
    jobData[TestJobAttributeKey.ExecutionResult] = resultType.ConvertToInt().ToString();
    jobData[TestJobAttributeKey.ExecutionMessage] = expectedExceptionMessage;
    return jobData;
}
/// <summary>
/// Stores a new result and raises the changed event, but only when the value actually differs.
/// </summary>
/// <param name="newResult">The result to apply.</param>
void ChangeResult(TestResultType newResult)
{
    TestResultType previous = result;
    if (previous == newResult)
    {
        return; // no change, no event
    }
    result = newResult;
    OnResultChanged(new TestResultTypeChangedEventArgs(previous, newResult));
}
/// <summary>
/// Cleanup after application
/// </summary>
/// <param name="resultType">The test result being cleaned up after.</param>
protected override void BeforeCleanup(TestResultType resultType)
{
    // Let the base class capture all the logging info first
    base.BeforeCleanup(resultType);

    // Close the app without saving and release the reference
    NotepadApplication?.CloseAndDontSave();
    NotepadApplication = null;
}
/// <summary>
/// Assigns the test result and raises the changed notification when it differs
/// from the previous value.
/// </summary>
/// <param name="value">The new test result.</param>
void SetTestResult(TestResultType value)
{
    TestResultType previous = testResult;
    testResult = value;

    if (previous != value)
    {
        OnResultChanged();
    }
}
/// <summary>
/// Creates a test result for a completed run, capturing its metadata, name,
/// outcome, timing, and sub-tests.
/// </summary>
/// <param name="metaData">Metadata describing the run this result belongs to.</param>
/// <param name="name">Display name of the test.</param>
/// <param name="type">Outcome of the test.</param>
/// <param name="start">When the test started.</param>
/// <param name="end">When the test ended.</param>
/// <param name="subTests">The individual sub-test results.</param>
public TestResult(TestRunMetaData metaData, string name, TestResultType type, DateTime start, DateTime end, List<SubTest> subTests)
{
    TestRunMetaData = metaData;
    Name = name;
    TestResultType = type;
    // FIX: format with the invariant culture so the serialized timestamp is stable and
    // machine-readable regardless of the host's culture settings (CA1305). The pattern
    // itself is culture-neutral, but the default overload still consults the current culture.
    Time = end.ToString("yyyy-MM-dd HH:mm:ss", System.Globalization.CultureInfo.InvariantCulture);
    EndTime = end;
    StartTime = start;
    SubTests = subTests;
}
/// <summary>
/// Stores a new result and raises the changed event, with one exception:
/// once a test has failed it is never reverted back to success
/// (a test with a data source reports one result per data row).
/// </summary>
/// <param name="newResult">The result to apply.</param>
void ChangeResult(TestResultType newResult)
{
    TestResultType previous = result;

    bool wouldRevertFailure =
        previous == TestResultType.Failure && newResult == TestResultType.Success;

    if (wouldRevertFailure || previous == newResult)
    {
        return;
    }

    result = newResult;
    OnResultChanged(new TestResultTypeChangedEventArgs(previous, newResult));
}
/// <summary>
/// Returns the display label for a result type; empty for unrecognized values.
/// </summary>
/// <param name="resultType">The result type to label.</param>
/// <returns>"Success", "Ignored", "Failure", or an empty string.</returns>
string GetResult(TestResultType resultType)
{
    switch (resultType)
    {
        case TestResultType.Success:
            return "Success";
        case TestResultType.Ignored:
            return "Ignored";
        case TestResultType.Failure:
            return "Failure";
        default:
            return String.Empty;
    }
}
/// <summary>
/// Builds the XML element describing a test result, including its type,
/// exit code, message, optional error details, and any extra content.
/// </summary>
/// <param name="resultType">The result; its numeric value doubles as the exit code.</param>
/// <param name="message">Result message; falls back to the result type's name when null.</param>
/// <param name="ex">Optional exception serialized as an error child element.</param>
/// <param name="xcontent">Additional content appended to the element.</param>
/// <returns>The populated test-result element.</returns>
public static XElement CreateXTestResult(TestResultType resultType, string message, Exception ex, params object[] xcontent)
{
    var xresult = new XElement(
        XTestResultNames.TestResult,
        new XAttribute(XTestResultNames.ATestResultType, resultType),
        new XAttribute(XTestResultNames.AExitCode, (int)resultType),
        new XElement(XTestResultNames.ResultMessage, message ?? resultType.ToString()),
        ex != null ? XNames.CreateXError(ex) : null);

    xresult.Add(xcontent);
    return xresult;
}
/// <summary>
/// Counts how many entries in the history have the specified result type.
/// </summary>
/// <param name="type">The result type to look for.</param>
/// <returns>The number of matching history entries.</returns>
internal static int GetMVTCount(TestResultType type)
{
    int count = 0;
    for (int index = 0; index < _History.Count; index++)
    {
        if (_History[index].GetResultType() == type)
        {
            count++;
        }
    }
    return count;
}
/// <summary>
/// Converts a Boost.Test.Result.Result enumeration into an equivalent
/// Microsoft.VisualStudio.TestPlatform.ObjectModel.TestOutcome.
/// </summary>
/// <param name="result">The Boost.Test.Result.Result value to convert.</param>
/// <returns>The Boost.Test.Result.Result enumeration converted into Microsoft.VisualStudio.TestPlatform.ObjectModel.TestOutcome.</returns>
private static VSTestOutcome GetTestOutcome(TestResultType result)
{
    if (result == TestResultType.Passed)
    {
        return VSTestOutcome.Passed;
    }

    if (result == TestResultType.Skipped)
    {
        return VSTestOutcome.Skipped;
    }

    // Failed, Aborted, and anything unrecognized all surface as a failure.
    return VSTestOutcome.Failed;
}
/// <summary>
/// Serializes a result type to its German display text.
/// </summary>
/// <param name="resultType">The result type to serialize.</param>
/// <returns>The German label for the result type.</returns>
/// <exception cref="ArgumentException">Thrown for unknown result types.</exception>
public static string Serialize(TestResultType resultType)
{
    if (resultType == TestResultType.Success)
    {
        return "erfolgreich";
    }

    if (resultType == TestResultType.Failure)
    {
        return "fehlgeschlagen";
    }

    if (resultType == TestResultType.PrecoditionFailure)
    {
        return "Vorbedingungen nicht erfüllt";
    }

    throw new ArgumentException($"Unknown test result type: \"{ resultType }\"");
}
/// <summary>
/// Per-test setup: captures the test and class names from the test context,
/// logs the start of the test, seeds the result as Pass, and constructs the
/// helper/access-layer objects used by the test.
/// </summary>
public virtual void Setup()
{
    // Derive the short class name from the fully qualified one
    TestMethodName = TestContext.TestName;
    string qualifiedClassName = TestContext.FullyQualifiedTestClassName;
    ClassName = qualifiedClassName.Substring(qualifiedClassName.LastIndexOf('.') + 1);

    Logger.TestStartInfo(TestMethodName, ClassName);

    // Assume success until something reports otherwise
    TestResult = TestResultType.Pass;

    result = new Result(TestContext);
    loginAccessLayer = new LoginAccessLayer();
    ixmUtils = new IXMWebUtils();
    dbInteraction = new DBInteraction();
}
/// <summary>
/// Returns the display label for a result type; empty for unrecognized values.
/// </summary>
/// <param name="resultType">The result type to label.</param>
/// <returns>"Success", "Ignored", "Failure", or an empty string.</returns>
string GetResult(TestResultType resultType)
{
    switch (resultType)
    {
        case TestResultType.Success:
            return "Success";
        case TestResultType.Ignored:
            return "Ignored";
        case TestResultType.Failure:
            return "Failure";
        default:
            return String.Empty;
    }
}
/// <summary>
/// Maps a test result type to its database identifier.
/// </summary>
/// <param name="testResultType">The result type to map.</param>
/// <returns>1 for Fail, 2 for Inconclusive, 3 for Pass.</returns>
/// <exception cref="InvalidOperationException">Thrown for any other result type.</exception>
public int ToDbTestResultTypeId(TestResultType testResultType)
{
    if (testResultType == TestResultType.Fail)
    {
        return 1;
    }

    if (testResultType == TestResultType.Inconclusive)
    {
        return 2;
    }

    if (testResultType == TestResultType.Pass)
    {
        return 3;
    }

    throw new InvalidOperationException("Test Result Type not recognized");
}
/// <summary>
/// Creates a new multi-variate test of the given type, marks it active,
/// and seeds it with a single control variation.
/// </summary>
/// <param name="mvtid">Identifier for this test.</param>
/// <param name="tType">The kind of element under test.</param>
public MultiVariateTest(int mvtid, TestTypesEnum tType)
{
    id = mvtid;
    testType = tType;
    isActive = true;
    LaunchDateTime = DateTime.UtcNow;
    ResultType = TestResultType.Live;
    AssignedGuidsTotal = 0;

    // FIX: original read "Variation control = control = new Variation(0, true);"
    // — a redundant double assignment of the same local.
    Variation control = new Variation(0, true);
    if (tType == TestTypesEnum.ADDTOCARTBUTTON)
    {
        // Seed the control with whatever variation currently wins for this button.
        control.SetVariationType(AddToCartButton.GetCurrentWinnderControl());
    }
    variants.Add(control);
}
/// <summary>
/// Take a screen shot if needed and tear down the web driver
/// </summary>
/// <param name="resultType">The test result</param>
protected override void BeforeLoggingTeardown(TestResultType resultType)
{
    // Only capture when file logging is on, the test did not pass, and logging is enabled
    bool shouldCapture =
        this.Log is FileLogger &&
        resultType != TestResultType.PASS &&
        this.LoggingEnabledSetting != LoggingEnabled.NO;

    try
    {
        if (shouldCapture)
        {
            SeleniumUtilities.CaptureScreenshot(this.WebDriver, this.Log);

            if (SeleniumConfig.GetSavePagesourceOnFail())
            {
                SeleniumUtilities.SavePageSource(this.WebDriver, this.Log, "FinalPageSource");
            }
        }
    }
    catch (Exception e)
    {
        // Best effort only — never let diagnostics capture fail the teardown
        this.TryToLog(MessageType.WARNING, "Failed to get screen shot because: {0}", e.Message);
    }
}
/// <summary>
/// Take a screen shot if needed and tear down the web driver
/// </summary>
/// <param name="resultType">The test result</param>
protected override void BeforeCleanup(TestResultType resultType)
{
    // Only capture when the driver exists, file logging is on, the test did not pass,
    // and logging is enabled
    try
    {
        bool shouldCapture =
            this.TestObject.GetDriverManager<SeleniumDriverManager>().IsDriverIntialized() &&
            this.Log is IFileLogger &&
            resultType != TestResultType.PASS &&
            this.LoggingEnabledSetting != LoggingEnabled.NO;

        if (shouldCapture)
        {
            SeleniumUtilities.CaptureScreenshot(this.WebDriver, this.TestObject, " Final");

            if (SeleniumConfig.GetSavePagesourceOnFail())
            {
                SeleniumUtilities.SavePageSource(this.WebDriver, this.TestObject, "FinalPageSource");
            }
        }
    }
    catch (Exception e)
    {
        // Best effort only — never let diagnostics capture fail the cleanup
        this.TryToLog(MessageType.WARNING, $"Failed to get screen shot because: {e.Message}");
    }
}
/// <summary>
/// Take a screen shot if needed and tear down the appium driver
/// </summary>
/// <param name="resultType">The test result</param>
protected override void BeforeLoggingTeardown(TestResultType resultType)
{
    try
    {
        // Captures screenshot if the driver is up, the test result is not a pass,
        // file logging is in use, and logging is enabled
        bool shouldCapture =
            this.TestObject.GetDriverManager<MobileDriverManager>().IsDriverIntialized() &&
            this.Log is FileLogger &&
            resultType != TestResultType.PASS &&
            this.LoggingEnabledSetting != LoggingEnabled.NO;

        if (shouldCapture)
        {
            AppiumUtilities.CaptureScreenshot(this.AppiumDriver, this.TestObject);

            if (AppiumConfig.GetSavePagesourceOnFail())
            {
                AppiumUtilities.SavePageSource(this.AppiumDriver, this.TestObject, "FinalPageSource");
            }
        }
    }
    catch (Exception exception)
    {
        // Best effort only — never let diagnostics capture fail the teardown
        this.TryToLog(MessageType.WARNING, "Failed to get screen shot because: {0}", exception.Message);
    }
}
/// <summary>
/// Appends a pass/fail row for the given test to the HTML report.
/// Result types other than Pass and Fail write an empty report entry.
/// </summary>
/// <param name="resulttype">Outcome of the test; only Pass and Fail produce a row.</param>
/// <param name="testName">Display name of the test.</param>
/// <param name="screenshotpath">Path to the screenshot linked from the row.</param>
/// <param name="exception">Currently unused; kept for caller compatibility.</param>
public void WriteTestResult(TestResultType resulttype, string testName, string screenshotpath, string exception = "")
{
    srno += 1;
    str = new StringBuilder();

    // FIX: the two branches previously duplicated the entire row-building code,
    // differing only in the counter and the colored status cell.
    if (resulttype == TestResultType.Pass || resulttype == TestResultType.Fail)
    {
        bool passed = resulttype == TestResultType.Pass;
        string statusCell;
        if (passed)
        {
            pass += 1;
            // NOTE(review): the timer is only stopped on a pass; failing tests keep it
            // running, so their Elapsed reading grows. Preserved as-is — confirm intent.
            testStartTime.Stop();
            statusCell = "<font color='green'><b>Pass</b></font>";
        }
        else
        {
            fail += 1;
            statusCell = "<font color='red'><b>Fail</b></font>";
        }

        str.Insert(0, ReadReportTemplate());
        str = str.Replace(
            "##body##",
            string.Format(
                "<tr><td>{0}</td><td>{1}</td><td>{2}</td><td><a href='{3}'>{4}</a></td><td>{5}</td><td>{6}</td></tr> ##body##",
                srno,
                testName,
                statusCell,
                screenshotpath,
                Path.GetFileName(screenshotpath),
                DateTime.Now.ToString("MM/dd/yy hh:mm:ss tt"),
                testStartTime.Elapsed.TotalSeconds));
    }

    WriteReport(str);
}
/// <summary>
/// Take a screen shot if needed and tear down the web driver
/// </summary>
/// <param name="resultType">The test result</param>
protected override void BeforeLoggingTeardown(TestResultType resultType)
{
    // Best-effort screenshot for non-passing tests when file logging is enabled
    try
    {
        bool shouldCapture =
            this.Log is FileLogger &&
            resultType != TestResultType.PASS &&
            this.LoggingEnabledSetting != LoggingEnabled.NO;

        if (shouldCapture)
        {
            SeleniumUtilities.CaptureScreenshot(this.WebDriver, this.Log);
        }
    }
    catch (Exception e)
    {
        this.TryToLog(MessageType.WARNING, "Failed to get screen shot because: {0}", e.Message);
    }

    this.TryToLog(MessageType.INFORMATION, "Closing webDriver");

    // Each teardown step is wrapped separately so one failure cannot block the others
    try
    {
        // Clear the waiter before quitting
        this.WebDriver.RemoveWaitDriver();

        // Quit the driver
        this.WebDriver.Quit();
    }
    catch (Exception e)
    {
        this.TryToLog(MessageType.WARNING, "Failed to quit because: {0}", e.Message);
    }

    try
    {
        this.WebDriver.Dispose();
    }
    catch (Exception e)
    {
        this.TryToLog(MessageType.WARNING, "Failed to dispose because: {0}", e.Message);
    }
}
/// <summary>
/// Parses a single fixture card from the HTML report into a TestResult,
/// including one SubTest per table row (skipping the header row).
/// </summary>
/// <param name="card">The HTML node of the fixture card.</param>
/// <param name="testRunMetaData">Metadata describing the run the card belongs to.</param>
/// <returns>The parsed test result with its sub-tests.</returns>
private static TestResult ParseCard(HtmlNode card, TestRunMetaData testRunMetaData)
{
    string name = card.SelectSingleNode(".//span[@class='fixture-name']").InnerText;
    TestResultType testResultType = ParseTestResultType(card);

    // Start/end timestamps are rendered as text on the card
    DateTime startDateTime = DateTime.Parse(card.SelectSingleNode(".//span[@class='startedAt']").InnerText);
    DateTime endDateTime = DateTime.Parse(card.SelectSingleNode(".//span[@class='endedAt']").InnerText);

    var subTests = new List<SubTest>();
    var rows = card.SelectNodes(".//div[@class='fixture-content']//table//tr");

    // The first row is the table header, so skip it
    foreach (var row in rows.Skip(1))
    {
        string subName = row.SelectSingleNode(".//td[@class='test-name']").InnerText;

        // Default to Passed unless a failed/skipped cell is present
        TestResultType subResultType = TestResultType.Passed;
        string subMessage = "";

        if (row.SelectSingleNode(".//td[@class='failed']") != null)
        {
            subResultType = TestResultType.Failed;
            subMessage = row.SelectSingleNode(".//td[@class='failed']//pre").InnerText;
        }
        else if (row.SelectSingleNode(".//td[@class='skipped']") != null)
        {
            subResultType = TestResultType.Skipped;
            subMessage = row.SelectSingleNode(".//td[@class='skipped']//pre").InnerText;
        }

        subTests.Add(new SubTest(subName, subResultType, subMessage, subTests.Count));
    }

    return new TestResult(testRunMetaData, name, testResultType, startDateTime, endDateTime, subTests);
}
/// <summary>
/// Decrements the counter tracking the given result type.
/// </summary>
/// <param name="result">The result type whose counter to decrement.</param>
/// <exception cref="NotSupportedException">Thrown when the result type is unknown.</exception>
public void Remove(TestResultType result)
{
    if (result == TestResultType.None)
    {
        indeterminate--;
    }
    else if (result == TestResultType.Success)
    {
        successful--;
    }
    else if (result == TestResultType.Failure)
    {
        failed--;
    }
    else if (result == TestResultType.Ignored)
    {
        ignored--;
    }
    else
    {
        throw new NotSupportedException("Invalid value for TestResultType");
    }
}
/// <summary>
/// Finalises a test that reached completion: records the completion time,
/// deactivates the test, and classifies the outcome based on significance
/// and the comparison of means.
/// </summary>
private void FinishTest()
{
    if (isActive == true)
    {
        // FIX: use UTC for consistency with LaunchDateTime, which the constructor
        // sets via DateTime.UtcNow; mixing local and UTC times corrupts duration math.
        CompletionDate = DateTime.UtcNow;
        isActive = false;

        // Picking the winning variant based on the highest mean (and reached significance).
        // TODO: this implementation only works for A/B. For MVT (multiple variants)
        // a loop is needed to select the most promising variant.
        if (Result.ReachedSignificance[0] == true)
        {
            // Decide whether the control or the test variant has the higher mean.
            int winnerVariantId = -1;
            if (Result.Means[0] > Result.Means[1])
            {
                winnerVariantId = 0;
                ResultType = TestResultType.Negative;
            }
            else
            {
                winnerVariantId = 1;
                ResultType = TestResultType.Positive;
            }

            if (testType == TestTypesEnum.ADDTOCARTBUTTON)
            {
                // Promote the winning variation as the new control.
                AddToCartButton.SetNewWinnerControl((int)variants[winnerVariantId].GetVariationType().id);
            }
        }
        else
        {
            // The test never reached significance, so consider it abandoned.
            ResultType = TestResultType.Inconclusive;
        }
        // (Removed a large block of commented-out MVT winner-selection code; see the TODO above.)
    }
}
/// <summary>
/// Builds a project-level error TestResult for a test run that failed with an exception.
/// </summary>
/// <param name="testRun">The run that failed; its Id appears in the description.</param>
/// <param name="exception">The exception that caused the failure.</param>
/// <param name="testResultType">The result type included in the description.</param>
/// <returns>The project-scoped error result.</returns>
private static TestResult GetResultForProject(TestRun testRun, Exception exception, TestResultType testResultType)
{
    string description = string.Format("There was an error, when running '{0}' ({1})", testRun.Id, testResultType);
    return GetResult(exception, description, TestType.Project, string.Empty);
}
/// <summary>
/// States the test case result.
/// </summary>
/// <param name="result">The test case execution result</param>
/// <returns>this</returns>
public BoostTestResultBuilder Result(TestResultType result)
{
    ResultType = result;
    return this;
}
/// <summary>
/// Captures a result transition: the value before and after the change.
/// </summary>
/// <param name="oldResult">The result before the change.</param>
/// <param name="newResult">The result after the change.</param>
public TestResultTypeChangedEventArgs(TestResultType oldResult, TestResultType newResult)
{
    this.oldResult = oldResult;
    this.newResult = newResult;
}
/// <summary>
/// Per-test teardown: resolves the final result (promoting unchecked soft-assert
/// failures to a hard failure), logs the outcome, prunes unwanted log files,
/// flushes performance timers, attaches artifacts, and releases all per-test
/// state keyed by the fully qualified test name. Throws at the very end if a
/// soft-assert failure forced the test to fail.
/// </summary>
public void Teardown()
{
    TestResultType resultType = this.GetResultType();
    bool forceTestFailure = false;

    // Switch the test to a failure if we have a soft assert failure the user never checked
    if (!this.SoftAssert.DidUserCheck() && this.SoftAssert.DidSoftAssertsFail())
    {
        resultType = TestResultType.FAIL;
        forceTestFailure = true;
        this.SoftAssert.LogFinalAssertData();
    }

    // Log the test result
    if (resultType == TestResultType.PASS)
    {
        this.TryToLog(MessageType.SUCCESS, "Test passed");
    }
    else if (resultType == TestResultType.FAIL)
    {
        this.TryToLog(MessageType.ERROR, "Test failed");
    }
    else if (resultType == TestResultType.INCONCLUSIVE)
    {
        this.TryToLog(MessageType.ERROR, "Test was inconclusive");
    }
    else
    {
        this.TryToLog(MessageType.WARNING, "Test had an unexpected result of {0}", this.GetResultText());
    }

    // Cleanup log files we don't want (ONFAIL mode keeps logs only for failing tests)
    try
    {
        if (this.Log is FileLogger && resultType == TestResultType.PASS && this.LoggingEnabledSetting == LoggingEnabled.ONFAIL)
        {
            File.Delete(((FileLogger)this.Log).FilePath);
        }
    }
    catch (Exception e)
    {
        this.TryToLog(MessageType.WARNING, "Failed to cleanup log files because: {0}", e.Message);
    }

    // Get the Fully Qualified Test Name — the key for all per-test collections below
    string fullyQualifiedTestName = this.GetFullyQualifiedTestClassName();

    if (this.PerfTimerCollectionSet.ContainsKey(fullyQualifiedTestName))
    {
        PerfTimerCollection collection = this.PerfTimerCollectionSet[fullyQualifiedTestName];

        // Write out the performance timers
        collection.Write(this.Log);

        // Release the perf timer collection for the test
        this.PerfTimerCollectionSet.TryRemove(fullyQualifiedTestName, out collection);
        collection = null;
    }

    // Attach log and screen shot if we can
    this.AttachLogAndSceenshot(fullyQualifiedTestName);

    // Release the logged messages
    List<string> loggedMessages;
    this.LoggedExceptions.TryRemove(fullyQualifiedTestName, out loggedMessages);
    loggedMessages = null;

    // Release the soft assert object
    SoftAssert softAssert;
    this.SoftAsserts.TryRemove(fullyQualifiedTestName, out softAssert);
    softAssert = null;

    // Release the logger
    Logger logger;
    this.Loggers.TryRemove(fullyQualifiedTestName, out logger);
    logger = null;

    // Release the base test object
    BaseTestObject baseTestObject;
    this.BaseTestObjects.TryRemove(fullyQualifiedTestName, out baseTestObject);
    baseTestObject = null;

    // Force the test to fail — done last so all cleanup above still runs
    if (forceTestFailure)
    {
        throw new Exception("Test was forced to fail in the cleanup - Likely the result of a soft assert failure.");
    }
}
/// <summary>
/// Saves the execution status by writing the test result, together with a
/// freshly captured screenshot, into the report.
/// </summary>
/// <param name="testContext">Context of the executing test.</param>
/// <param name="resulttype">Outcome of the test.</param>
/// <param name="result">Report writer that receives the result row.</param>
public static void SaveTestStatus(TestContext testContext, TestResultType resulttype, Result result)
{
    string screenshotPath = TakeScreenshot(testContext);
    result.WriteTestResult(resulttype, testContext.TestName, screenshotPath);
}
/// <summary>
/// Creates a test result wrapping a payload and its result type,
/// with an initially empty data bag.
/// </summary>
/// <param name="result">The payload carried by this result.</param>
/// <param name="type">The kind of result.</param>
public TestResult(R result, TestResultType type)
{
    _data = new Dictionary<string, object>();
    Result = result;
    Type = type;
}
/// <summary>
/// Asserts BoostTestResult against the expected details
/// </summary>
/// <param name="testResult">The BoostTestResult to test</param>
/// <param name="parentTestResult">The expected parent BoostTestResult of testResult</param>
/// <param name="name">The expected TestCase display name</param>
/// <param name="result">The expected TestCase execution result</param>
/// <param name="assertionsPassed">The expected number of passed assertions (e.g. BOOST_CHECKS)</param>
/// <param name="assertionsFailed">The expected number of failed assertions (e.g. BOOST_CHECKS, BOOST_REQUIRE, BOOST_FAIL etc.)</param>
/// <param name="expectedFailures">The expected number of expected test failures</param>
/// <param name="testCasesPassed">The expected number of passed child TestCases</param>
/// <param name="testCasesFailed">The expected number of failed child TestCases</param>
/// <param name="testCasesSkipped">The expected number of skipped child TestCases</param>
/// <param name="testCasesAborted">The expected number of aborted child TestCases</param>
private void AssertReportDetails(
    BoostTestResult testResult,
    BoostTestResult parentTestResult,
    string name,
    TestResultType result,
    uint assertionsPassed,
    uint assertionsFailed,
    uint expectedFailures,
    uint testCasesPassed,
    uint testCasesFailed,
    uint testCasesSkipped,
    uint testCasesAborted
)
{
    // Delegate the per-test-case checks to the shorter overload
    AssertReportDetails(testResult, parentTestResult, name, result, assertionsPassed, assertionsFailed, expectedFailures);

    // Then verify the child test-case tallies
    Assert.That(testResult.TestCasesPassed, Is.EqualTo(testCasesPassed));
    Assert.That(testResult.TestCasesFailed, Is.EqualTo(testCasesFailed));
    Assert.That(testResult.TestCasesSkipped, Is.EqualTo(testCasesSkipped));
    Assert.That(testResult.TestCasesAborted, Is.EqualTo(testCasesAborted));
}
/// <summary>
/// Calls the base class's UpdateImageListIndex method.
/// </summary>
/// <param name="result">The test result forwarded to the base implementation.</param>
public void CallUpdateImageListIndex(TestResultType result) => base.UpdateImageListIndex(result);
/// <summary>
/// Creates a test result identified by id with the given outcome.
/// </summary>
/// <param name="id">Identifier of the test.</param>
/// <param name="result">Outcome of the test.</param>
public TestResult(uint id, TestResultType result)
{
    this.Id = id;
    this.Result = result;
}
/// <summary>
/// Returns the number of contained test cases which are of the specified Result type.
/// </summary>
/// <param name="type">The result type to lookup</param>
/// <returns>The number of contained test cases which are of the specified Result type.</returns>
private uint GetCount(TestResultType type)
{
    // Delegate to the multi-type overload with a single-element set
    return GetCount(new[] { type });
}
/// <summary>
/// Converts a Boost.Test.Result.Result enumeration into an equivalent
/// Microsoft.VisualStudio.TestPlatform.ObjectModel.TestOutcome.
/// </summary>
/// <param name="result">The Boost.Test.Result.Result value to convert.</param>
/// <returns>The Boost.Test.Result.Result enumeration converted into Microsoft.VisualStudio.TestPlatform.ObjectModel.TestOutcome.</returns>
private static VSTestOutcome GetTestOutcome(TestResultType result)
{
    if (result == TestResultType.Passed)
    {
        return VSTestOutcome.Passed;
    }

    if (result == TestResultType.Skipped)
    {
        return VSTestOutcome.Skipped;
    }

    // Failed, Aborted, and anything unrecognized all surface as a failure.
    return VSTestOutcome.Failed;
}
/// <summary>
/// Writes the textual label of the result under the "Result" name.
/// </summary>
/// <param name="resultType">The result type to write.</param>
void WriteResult(TestResultType resultType)
{
    WriteNameAndValue("Result", GetResult(resultType));
}