/// <summary>
/// Verify that starting a timer whose name is already running throws an exception
/// </summary>
public void PerfStartTimerThrowException()
{
    PerfTimerCollection timerCollection = this.PerfTimerCollection;

    // The first start succeeds; starting the same timer name again must throw
    timerCollection.StartTimer("alreadyStarted");
    timerCollection.StartTimer("alreadyStarted");
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseTestObject" /> class
/// </summary>
/// <param name="logger">The test's logger</param>
/// <param name="softAssert">The test's soft assert</param>
/// <param name="perfTimerCollection">The test's performance timer collection</param>
public BaseTestObject(Logger logger, SoftAssert softAssert, PerfTimerCollection perfTimerCollection)
{
    // Start each test object with empty key/value and key/object stores
    this.Values = new Dictionary<string, string>();
    this.Objects = new Dictionary<string, object>();

    this.Log = logger;
    this.SoftAssert = softAssert;
    this.PerfTimerCollection = perfTimerCollection;
}
/// <summary>
/// Verify that a newly created Selenium test object carries over the current test state
/// </summary>
public void SeleniumCreateNewTestObject()
{
    this.CreateNewTestObject();
    SeleniumTestObject created = TestObject;

    // The new object should report the same driver, logger, soft assert and perf timer collection
    Assert.AreEqual(WebDriver.ToString(), created.WebDriver.ToString());
    Assert.AreEqual(Log.ToString(), created.Log.ToString());
    Assert.AreEqual(SoftAssert.ToString(), created.SoftAssert.ToString());
    Assert.AreEqual(PerfTimerCollection.ToString(), created.PerfTimerCollection.ToString());
}
/// <summary>
/// Verify that writing a performance timer collection with an invalid test name
/// logs the save failure rather than silently dropping the data
/// </summary>
public void PerfTimerWriteException()
{
    // Invalid testName: "<>" is only forbidden on Windows, so append every
    // character the host OS disallows in file names to stay cross-platform
    PerfTimerCollection p = new PerfTimerCollection("<>" + new string(Path.GetInvalidFileNameChars()));

    p.StartTimer("testTimer");
    p.StopTimer("testTimer"); // StopTimer matches the API used by the sibling perf timer tests (was EndTimer)

    FileLogger log = new FileLogger("PerfTimerWriteException", "PerfTimerWriteException", MessageType.GENERIC, true);
    p.Write(log);

    // The failed save must be recorded in the log file
    Assert.IsTrue(File.ReadAllText(log.FilePath).Contains("Could not save response time file. Error was:"));
}
/// <summary>
/// Verify that writing with a file-system-invalid test name logs the save error
/// </summary>
public void PerfTimerWriteException()
{
    // Compose a test name containing every character the host OS forbids in file names
    string badName = "<>";
    foreach (char invalid in Path.GetInvalidFileNameChars())
    {
        badName += invalid;
    }

    PerfTimerCollection p = new PerfTimerCollection(badName);
    p.StartTimer("testTimer");
    p.StopTimer("testTimer");

    FileLogger log = new FileLogger("PerfTimerWriteException", "PerfTimerWriteException", MessageType.GENERIC, true);
    p.Write(log);

    // Tests that an exception is thrown and logged.
    Assert.IsTrue(File.ReadAllText(log.FilePath).Contains("Could not save response time file. Error was:"));
}
/// <summary>
/// Start and stop two named timers (with contexts and a JSON payload), write the
/// collection to disk, reload it, and verify the payload, timer count and contexts
/// </summary>
public void PerfStartStop2Timers()
{
    PerfTimerCollection p = this.PerfTimerCollection;

    // build an object to store in the payloadstring of the PerfTimerCollection
    this.tc = new Tconfig
    {
        LogPath = Config.GetGeneralValue("FileLoggerPath"),
        Logtype = Config.GetGeneralValue("LogType"),
        WebURI = Config.GetGeneralValue("WebServiceUri")
    };

    // store it (as a JSON string) and keep a copy for the round-trip comparison below
    p.PerfPayloadString = JsonConvert.SerializeObject(this.tc);
    string json_string = p.PerfPayloadString;

    // Two overlapping timers: "test1" (context "Outer") wraps "test2" (context "Inner")
    p.StartTimer("Outer", "test1");
    System.Threading.Thread.Sleep(1000);
    p.StartTimer("Inner", "test2");
    System.Threading.Thread.Sleep(1000);
    p.StopTimer("test1");
    p.StopTimer("test2");

    // Write the log and validate the resulting file contents
    p.Write(this.Log);
    string filepath = Path.Combine(LoggingConfig.GetLogDirectory(), p.FileName);

    // If the file doesn't exist, just bail
    Assert.IsTrue(File.Exists(filepath), "File Check : Expected File does not exist:" + filepath);

    // Otherwise record the assertion as true and continue...
    SoftAssert.Assert(() => Assert.IsTrue(true, "File Check : Expected File exists."));

    // Round-trip: reload the collection that was just written
    PerfTimerCollection r = PerfTimerCollection.LoadPerfTimerCollection(filepath);

    // Payload check - the reloaded JSON payload must match what was stored
    SoftAssert.Assert(() => Assert.AreEqual(json_string, r.PerfPayloadString, "Payload", "Validated Payload (json)"));

    // There should be 2 timers
    SoftAssert.Assert(() => Assert.AreEqual(2.ToString(), r.Timerlist.Count.ToString(), "Expected number of timers"));

    // Check the timers
    int badnamecount = 0;
    foreach (PerfTimer pt in r.Timerlist)
    {
        switch (pt.TimerName)
        {
            // Timer = test1 should have a context of Outer
            case "test1":
                SoftAssert.Assert(() => Assert.AreEqual("Outer", pt.TimerContext, "test1", "Test1 Context"));
                break;

            // Timer = test2 should have a context of Inner
            case "test2":
                SoftAssert.Assert(() => Assert.AreEqual("Inner", pt.TimerContext, "test2", "Test2 Context"));
                break;

            // Catch any extra timers
            default:
                badnamecount++;
                SoftAssert.Assert(() => Assert.IsTrue(false, "ExtraTimer", "Extra timer present: " + pt.TimerName));
                break;
        }
    }

    if (badnamecount != 0)
    {
        // We would have logged any extra timers, so pass the ExtraTimer assert
        SoftAssert.Assert(() => Assert.IsTrue(true, "ExtraTimer"));
    }

    SoftAssert.FailTestIfAssertFailed();
}
/// <summary>
/// Verify the constructor stores the supplied test name
/// </summary>
public void PerfTimerConstructorTest()
{
    PerfTimerCollection timers = new PerfTimerCollection("testTimer");

    Assert.AreEqual("testTimer", timers.TestName);
}
/// <summary>
/// Verify that stopping a timer which was never started throws an exception
/// </summary>
public void PerfEndTimerThrowException()
{
    PerfTimerCollection timerCollection = this.PerfTimerCollection;

    // "notStarted" has no matching StartTimer call, so this must throw
    timerCollection.StopTimer("notStarted");
}
/// <summary>
/// Start three timers but stop only two; the written file is expected to contain
/// just the two stopped timers (the never-stopped "test3" should not be persisted)
/// and an empty payload
/// </summary>
public void PerfDontStopTimer()
{
    PerfTimerCollection r;
    PerfTimerCollection p = this.PerfTimerCollection;
    string filepath;

    // "test1" has a context, "test2" does not; "test3" is started but never stopped
    p.StartTimer("StoppedOuter", "test1");
    p.StartTimer("test2");
    System.Threading.Thread.Sleep(1000);
    p.StopTimer("test1");
    p.StopTimer("test2");
    p.StartTimer("NotStopped", "test3");

    // Write the log and validate the resulting file contents
    p.Write(this.Log);
    filepath = Path.Combine(LoggingConfig.GetLogDirectory(), p.FileName);

    // If the file doesn't exist, just bail
    Assert.IsTrue(File.Exists(filepath), "File Check : Expected File does not exist:" + filepath);

    // Otherwise record the assertion as true and continue...
    SoftAssert.Assert(() => Assert.IsTrue(true, "File Check : Expected File exists."));

    // Round-trip: reload the collection that was just written
    r = PerfTimerCollection.LoadPerfTimerCollection(filepath);

    // Payload should be empty - no payload string was ever set
    SoftAssert.Assert(() => Assert.IsTrue(string.IsNullOrEmpty(r.PerfPayloadString), "EmptyPayload", "Payload was not Empty! Contained: " + r.PerfPayloadString));

    // There should be 2 timers
    SoftAssert.Assert(() => Assert.AreEqual(2.ToString(), r.Timerlist.Count.ToString(), "Expected number of timers"));

    // Check the timers
    int badnamecount = 0;
    foreach (PerfTimer pt in r.Timerlist)
    {
        switch (pt.TimerName)
        {
            // Timer = test1 should have a context of StoppedOuter
            case "test1":
                SoftAssert.Assert(() => Assert.AreEqual("StoppedOuter", pt.TimerContext, "test1", "Test1 Context"));
                break;

            // Timer = test2 should have an empty context
            case "test2":
                SoftAssert.Assert(() => Assert.IsTrue(string.IsNullOrEmpty(pt.TimerContext), "Timer2Context", "Context for " + pt.TimerName + " was not Empty! Contained: " + pt.TimerContext));
                break;

            // Catch any extra timers (including "test3" if it was wrongly persisted)
            default:
                badnamecount++;
                SoftAssert.Assert(() => Assert.IsTrue(false, "ExtraTimer", "Extra timer present: " + pt.TimerName));
                break;
        }
    }

    if (badnamecount != 0)
    {
        // We would have logged any extra timers, so pass the ExtraTimer assert
        SoftAssert.Assert(() => Assert.IsTrue(true, "ExtraTimer"));
    }

    SoftAssert.FailTestIfAssertFailed();
}
/// <summary>
/// Per-test cleanup: log the result (forcing failure on unchecked soft assert
/// failures), delete unwanted pass logs, write perf timers, attach artifacts,
/// and release all per-test state keyed by the fully qualified test name
/// </summary>
public void Teardown()
{
    TestResultType resultType = this.GetResultType();
    bool forceTestFailure = false;

    // Switch the test to a failure if we have a soft assert failure
    if (!this.SoftAssert.DidUserCheck() && this.SoftAssert.DidSoftAssertsFail())
    {
        resultType = TestResultType.FAIL;
        forceTestFailure = true;
        this.SoftAssert.LogFinalAssertData();
    }

    // Log the test result
    if (resultType == TestResultType.PASS)
    {
        this.TryToLog(MessageType.SUCCESS, "Test passed");
    }
    else if (resultType == TestResultType.FAIL)
    {
        this.TryToLog(MessageType.ERROR, "Test failed");
    }
    else if (resultType == TestResultType.INCONCLUSIVE)
    {
        this.TryToLog(MessageType.ERROR, "Test was inconclusive");
    }
    else
    {
        this.TryToLog(MessageType.WARNING, "Test had an unexpected result of {0}", this.GetResultText());
    }

    // Cleanup log files we don't want (only keep file logs for failures when logging is ONFAIL)
    try
    {
        if (this.Log is FileLogger && resultType == TestResultType.PASS && this.LoggingEnabledSetting == LoggingEnabled.ONFAIL)
        {
            File.Delete(((FileLogger)this.Log).FilePath);
        }
    }
    catch (Exception e)
    {
        this.TryToLog(MessageType.WARNING, "Failed to cleanup log files because: {0}", e.Message);
    }

    // Get the Fully Qualified Test Name - the key for all per-test state maps
    string fullyQualifiedTestName = this.GetFullyQualifiedTestClassName();

    if (this.PerfTimerCollectionSet.ContainsKey(fullyQualifiedTestName))
    {
        PerfTimerCollection collection = this.PerfTimerCollectionSet[fullyQualifiedTestName];

        // Write out the performance timers
        collection.Write(this.Log);

        // Release the perf time collection for the test
        this.PerfTimerCollectionSet.TryRemove(fullyQualifiedTestName, out collection);
        collection = null;
    }

    // Attach log and screen shot if we can
    this.AttachLogAndSceenshot(fullyQualifiedTestName);

    // Release the logged messages
    List <string> loggedMessages;
    this.LoggedExceptions.TryRemove(fullyQualifiedTestName, out loggedMessages);
    loggedMessages = null;

    // Release the soft assert object
    SoftAssert softAssert;
    this.SoftAsserts.TryRemove(fullyQualifiedTestName, out softAssert);
    softAssert = null;

    // Release the logger
    Logger logger;
    this.Loggers.TryRemove(fullyQualifiedTestName, out logger);
    logger = null;

    // Release the base test object
    BaseTestObject baseTestObject;
    this.BaseTestObjects.TryRemove(fullyQualifiedTestName, out baseTestObject);
    baseTestObject = null;

    // Force the test to fail
    // NOTE(review): this throws the base Exception type; consider a more specific
    // framework exception (e.g. an assertion-failure type) - confirm what callers catch
    if (forceTestFailure)
    {
        throw new Exception("Test was forced to fail in the cleanup - Likely the result of a soft assert failure.");
    }
}
/// <summary>
/// Per-test cleanup: log the result (forcing failure on unchecked soft assert
/// failures), run pre-logging teardown, delete unwanted pass logs, write and
/// associate the perf timer file, attach associated files, and always dispose
/// the base test object - even if earlier cleanup steps throw
/// </summary>
public void Teardown()
{
    // Get the Fully Qualified Test Name - the key for the per-test state maps
    string fullyQualifiedTestName = this.GetFullyQualifiedTestClassName();

    try
    {
        TestResultType resultType = this.GetResultType();
        bool forceTestFailure = false;

        // Switch the test to a failure if we have a soft assert failure
        if (!this.SoftAssert.DidUserCheck() && this.SoftAssert.DidSoftAssertsFail())
        {
            resultType = TestResultType.FAIL;
            forceTestFailure = true;
            this.SoftAssert.LogFinalAssertData();
        }

        // Log the test result
        if (resultType == TestResultType.PASS)
        {
            this.TryToLog(MessageType.SUCCESS, "Test passed");
        }
        else if (resultType == TestResultType.FAIL)
        {
            this.TryToLog(MessageType.ERROR, "Test failed");
        }
        else if (resultType == TestResultType.INCONCLUSIVE)
        {
            this.TryToLog(MessageType.ERROR, "Test was inconclusive");
        }
        else
        {
            this.TryToLog(MessageType.WARNING, "Test had an unexpected result of {0}", this.GetResultText());
        }

        // Give subclasses a chance to act before logging is torn down
        this.BeforeLoggingTeardown(resultType);

        // Cleanup log files we don't want (only keep file logs for failures when logging is ONFAIL)
        try
        {
            if (this.Log is FileLogger && resultType == TestResultType.PASS && this.LoggingEnabledSetting == LoggingEnabled.ONFAIL)
            {
                File.Delete(((FileLogger)this.Log).FilePath);
            }
        }
        catch (Exception e)
        {
            this.TryToLog(MessageType.WARNING, "Failed to cleanup log files because: {0}", e.Message);
        }

        PerfTimerCollection collection = this.TestObject.PerfTimerCollection;

        // Write out the performance timers
        collection.Write(this.Log);

        if (collection.FileName != null)
        {
            // Path.Combine instead of a hard-coded "\\" so the path is valid on any OS
            this.TestObject.AddAssociatedFile(Path.Combine(LoggingConfig.GetLogDirectory(), collection.FileName));
        }

        // Attach associated files if we can
        this.AttachAssociatedFiles();

        // Release the logged messages
        this.LoggedExceptions.TryRemove(fullyQualifiedTestName, out List<string> loggedMessages);

        // Force the test to fail
        if (forceTestFailure)
        {
            throw new AssertFailedException("Test was forced to fail in the cleanup - Likely the result of a soft assert failure.");
        }
    }
    finally
    {
        // Release the base test object; guard the Dispose so a missing map entry
        // cannot turn cleanup into a NullReferenceException
        this.BaseTestObjects.TryRemove(fullyQualifiedTestName, out BaseTestObject baseTestObject);
        baseTestObject?.Dispose();
    }
}
/// <summary>
/// Per-test cleanup: warn on unchecked soft asserts, log the result, delete
/// unwanted pass logs, write perf timers, and release all per-test state
/// keyed by the fully qualified test name
/// </summary>
public void Teardown()
{
    TestResultType resultType = this.GetResultType();

    // Check if soft asserts were checked in the test; warn (but do not fail) if not
    if (!this.SoftAssert.DidUserCheck())
    {
        this.TryToLog(MessageType.WARNING, "User did not check for soft asserts");
    }

    // Log the test result
    if (resultType == TestResultType.PASS)
    {
        this.TryToLog(MessageType.SUCCESS, "Test passed");
    }
    else if (resultType == TestResultType.FAIL)
    {
        this.TryToLog(MessageType.ERROR, "Test failed");
    }
    else if (resultType == TestResultType.INCONCLUSIVE)
    {
        this.TryToLog(MessageType.ERROR, "Test was inconclusive");
    }
    else
    {
        this.TryToLog(MessageType.WARNING, "Test had an unexpected result of {0}", this.GetResultText());
    }

    // Cleanup log files we don't want (only keep file logs for failures when logging is ONFAIL)
    try
    {
        if (this.Log is FileLogger && resultType == TestResultType.PASS && this.LoggingEnabledSetting == LoggingEnabled.ONFAIL)
        {
            File.Delete(((FileLogger)this.Log).FilePath);
        }
    }
    catch (Exception e)
    {
        this.TryToLog(MessageType.WARNING, "Failed to cleanup log files because: {0}", e.Message);
    }

    // Get the Fully Qualified Test Name
    string fullyQualifiedTestName = this.GetFullyQualifiedTestClassName();

    // Find the PerfTimerCollection for this test
    // (key is just an alias for the fully qualified test name)
    string key = fullyQualifiedTestName;
    if (this.PerfTimerCollectionSet.ContainsKey(key))
    {
        PerfTimerCollection collection = this.PerfTimerCollectionSet[key];

        // Write out the performance timers
        collection.Write(this.Log);

        // Release the perf time collection for the test
        this.PerfTimerCollectionSet.TryRemove(key, out collection);
        collection = null;
    }

    // Release the logged messages
    List <string> loggedMessages;
    this.LoggedExceptions.TryRemove(fullyQualifiedTestName, out loggedMessages);
    loggedMessages = null;

    // Release the logger
    Logger logger;
    this.Loggers.TryRemove(fullyQualifiedTestName, out logger);
    logger = null;

    // Release the soft assert object
    SoftAssert softAssert;
    this.SoftAsserts.TryRemove(fullyQualifiedTestName, out softAssert);
    softAssert = null;

    // Release the base test object
    BaseTestObject baseTestObject;
    this.BaseTestObjects.TryRemove(fullyQualifiedTestName, out baseTestObject);
    baseTestObject = null;
}
/// <summary>
/// Initializes a new instance of the <see cref="SeleniumTestObject" /> class
/// </summary>
/// <param name="webDriver">The test's Selenium web driver</param>
/// <param name="logger">The test's logger</param>
/// <param name="softAssert">The test's soft assert</param>
/// <param name="perfTimerCollection">The test's performance timer collection</param>
public SeleniumTestObject(IWebDriver webDriver, Logger logger, SoftAssert softAssert, PerfTimerCollection perfTimerCollection)
    : base(logger, softAssert, perfTimerCollection)
{
    // The web driver is the only state this subclass adds on top of the base test object
    this.WebDriver = webDriver;
}