/// <summary>
/// Verify that starting a timer which is already running throws an exception
/// </summary>
public void PerfStartTimerThrowException()
{
    IPerfTimerCollection timerCollection = this.PerfTimerCollection;

    // The second start of the same named timer is expected to throw
    timerCollection.StartTimer("alreadyStarted");
    timerCollection.StartTimer("alreadyStarted");
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseTestObject" /> class
/// </summary>
/// <param name="logger">The test's logger</param>
/// <param name="softAssert">The test's soft assert</param>
/// <param name="collection">The test's performance timer collection</param>
/// <param name="fullyQualifiedTestName">The test's fully qualified test name</param>
public BaseTestObject(ILogger logger, ISoftAssert softAssert, IPerfTimerCollection collection, string fullyQualifiedTestName)
{
    // Wire up the externally supplied collaborators
    this.Log = logger;
    this.SoftAssert = softAssert;
    this.PerfTimerCollection = collection;

    // Fresh per-test storage
    this.Values = new Dictionary<string, string>();
    this.Objects = new Dictionary<string, object>();
    this.ManagerStore = new ManagerStore();
    this.AssociatedFiles = new HashSet<string>();

    logger.LogMessage(MessageType.INFORMATION, "Setup test object for " + fullyQualifiedTestName);
}
/// <summary>
/// Tear down after a test run: finalize soft asserts, log the result, clean up
/// unwanted log files, write performance timers, attach associated files, and
/// release per-test resources. Always disposes the log and the base test object,
/// even when the teardown body throws.
/// </summary>
public void Teardown()
{
    // Get the Fully Qualified Test Name
    string fullyQualifiedTestName = this.GetFullyQualifiedTestClassName();

    try
    {
        TestResultType resultType = this.GetResultType();
        bool forceTestFailure = false;

        // Switch the test to a failure if we have a soft assert failure
        this.SoftAssert.CheckForExpectedAsserts();
        if (!this.SoftAssert.DidUserCheck() && this.SoftAssert.DidSoftAssertsFail())
        {
            resultType = TestResultType.FAIL;
            forceTestFailure = true;
            this.SoftAssert.LogFinalAssertData();
        }

        // Log the test result
        if (resultType == TestResultType.PASS)
        {
            this.TryToLog(MessageType.SUCCESS, "Test passed");
            this.WriteAssociatedFilesNamesToLog();
        }
        else if (resultType == TestResultType.FAIL)
        {
            this.TryToLog(MessageType.ERROR, "Test failed");
        }
        else if (resultType == TestResultType.INCONCLUSIVE)
        {
            this.TryToLog(MessageType.ERROR, "Test was inconclusive");
        }
        else
        {
            this.TryToLog(MessageType.WARNING, "Test had an unexpected result of {0}", this.GetResultText());
        }

        this.GetResultTextNunit();
        this.LogVerbose("Test outcome");
        this.BeforeCleanup(resultType);

        // Cleanup log files we don't want - passing tests drop their log when logging is ONFAIL
        try
        {
            if (this.Log is IFileLogger logger && resultType == TestResultType.PASS && this.LoggingEnabledSetting == LoggingEnabled.ONFAIL)
            {
                File.Delete(logger.FilePath);
            }
        }
        catch (Exception e)
        {
            this.TryToLog(MessageType.WARNING, "Failed to cleanup log files because: {0}", e.Message);
        }

        IPerfTimerCollection collection = this.TestObject.PerfTimerCollection;
        this.PerfTimerCollection = collection;

        // Write out the performance timers
        collection.Write(this.Log);
        if (collection.FileName != null)
        {
            // Path.Combine keeps the separator consistent with the rest of this file
            this.TestObject.AddAssociatedFile(Path.Combine(LoggingConfig.GetLogDirectory(), collection.FileName));
        }

        // Attach associated files if we can
        this.AttachAssociatedFiles();

        // Release the logged messages - the removed list is not needed
        this.LoggedExceptions.TryRemove(fullyQualifiedTestName, out _);

        // Force the test to fail
        if (forceTestFailure)
        {
            throw new AssertFailedException("Test was forced to fail in the cleanup - Likely the result of a soft assert failure.");
        }
    }
    finally
    {
        // Release log
        this.Log?.Dispose();

        // Release the base test object; guard the dispose so a missing entry
        // (TryRemove returning false with a null out value) cannot throw a
        // NullReferenceException from the finally block
        this.BaseTestObjects.TryRemove(fullyQualifiedTestName, out ITestObject baseTestObject);
        baseTestObject?.Dispose();
    }
}
/// <summary>
/// Starts and stops two overlapping timers, writes the collection (including a
/// JSON payload) to disk, reloads it, and validates the payload round-trip and
/// both timers' contexts
/// </summary>
public void PerfStartStop2Timers()
{
    IPerfTimerCollection p = this.PerfTimerCollection;

    // build an object to store in the payloadstring of the PerfTimerCollection
    this.tc = new Tconfig
    {
        LogPath = Config.GetGeneralValue("FileLoggerPath"),
        Logtype = Config.GetGeneralValue("LogType"),
        WebURI = Config.GetGeneralValue("WebServiceUri")
    };

    // store it (as a JSON string)
    p.PerfPayloadString = JsonConvert.SerializeObject(this.tc);
    string json_string = p.PerfPayloadString;

    // Two overlapping timers: "test1" with context "Outer", "test2" with context "Inner"
    p.StartTimer("Outer", "test1");
    System.Threading.Thread.Sleep(1000);
    p.StartTimer("Inner", "test2");
    System.Threading.Thread.Sleep(1000);
    p.StopTimer("test1");
    p.StopTimer("test2");

    // Write the log and validate the resulting file contents
    p.Write(this.Log);
    string filepath = Path.Combine(LoggingConfig.GetLogDirectory(), p.FileName);

    // If the file doesn't exist, just bail
    Assert.IsTrue(File.Exists(filepath), "File Check : Expected File does not exist:" + filepath);

    // Otherwise record the assertion as true and continue...
    SoftAssert.Assert(() => Assert.IsTrue(true), "File Check : Expected File exists.");

    IPerfTimerCollection r = PerfTimerCollection.LoadPerfTimerCollection(filepath);

    // Payload check - the reloaded payload must match what was serialized above
    SoftAssert.Assert(() => Assert.AreEqual(json_string, r.PerfPayloadString), "Payload", "Validated Payload (json)");

    // There should be 2 timers
    SoftAssert.Assert(() => Assert.AreEqual(2.ToString(), r.Timerlist.Count.ToString()), "Expected number of timers");

    // Check the timers
    int badnamecount = 0;
    foreach (PerfTimer pt in r.Timerlist)
    {
        switch (pt.TimerName)
        {
            // Timer = test1 should have a context of Outer
            case "test1":
                SoftAssert.Assert(() => Assert.AreEqual("Outer", pt.TimerContext), "test1", "Test1 Context");
                break;

            // Timer = test2 should have a context of Inner (it was started with that context)
            case "test2":
                SoftAssert.Assert(() => Assert.AreEqual("Inner", pt.TimerContext), "test2", "Test2 Context");
                break;

            // Catch any extra timers
            default:
                badnamecount++;
                SoftAssert.Assert(() => Assert.IsTrue(false), "ExtraTimer", "Extra timer present: " + pt.TimerName);
                break;
        }
    }

    if (badnamecount != 0)
    {
        // We would have logged any extra timers, so pass the ExtraTimer assert
        SoftAssert.Assert(() => Assert.IsTrue(true), "ExtraTimer");
    }

    SoftAssert.FailTestIfAssertFailed();
}
/// <summary>
/// Verify that stopping a timer which was never started throws an exception
/// </summary>
public void PerfEndTimerThrowException()
{
    // Stopping an unknown timer name is expected to throw
    this.PerfTimerCollection.StopTimer("notStarted");
}
/// <summary>
/// Starts three timers but leaves the third running when the collection is written;
/// verifies that only the two stopped timers are persisted, that their contexts are
/// correct, and that the payload is empty
/// </summary>
public void PerfDontStopTimer()
{
    IPerfTimerCollection r;
    IPerfTimerCollection p = this.PerfTimerCollection;
    string filepath;

    p.StartTimer("StoppedOuter", "test1");
    p.StartTimer("test2");
    System.Threading.Thread.Sleep(1000);
    p.StopTimer("test1");
    p.StopTimer("test2");

    // "test3" is intentionally never stopped, so it should not appear in the written file
    p.StartTimer("NotStopped", "test3");

    // Write the log and validate the resulting file contents
    p.Write(this.Log);
    filepath = Path.Combine(LoggingConfig.GetLogDirectory(), p.FileName);

    // If the file doesn't exist, just bail
    Assert.IsTrue(File.Exists(filepath), "File Check : Expected File does not exist:" + filepath);

    // Otherwise record the assertion as true and continue...
    SoftAssert.Assert(() => Assert.IsTrue(true), "File Check : Expected File exists.");

    r = PerfTimerCollection.LoadPerfTimerCollection(filepath);

    // Payload should be empty - nothing was assigned to PerfPayloadString in this test
    SoftAssert.Assert(() => Assert.IsTrue(string.IsNullOrEmpty(r.PerfPayloadString)), "EmptyPayload", "Payload was not Empty! Contained: " + r.PerfPayloadString);

    // There should be 2 timers
    SoftAssert.Assert(() => Assert.AreEqual(2.ToString(), r.Timerlist.Count.ToString()), "Expected number of timers");

    // Check the timers
    int badnamecount = 0;
    foreach (PerfTimer pt in r.Timerlist)
    {
        switch (pt.TimerName)
        {
            // Timer = test1 should have a context of StoppedOuter
            case "test1":
                SoftAssert.Assert(() => Assert.AreEqual("StoppedOuter", pt.TimerContext), "test1", "Test1 Context");
                break;

            // Timer = test2 should have an empty context
            case "test2":
                SoftAssert.Assert(() => Assert.IsTrue(string.IsNullOrEmpty(pt.TimerContext)), "Timer2Context", "Context for " + pt.TimerName + " was not Empty! Contained: " + pt.TimerContext);
                break;

            // Catch any extra timers
            default:
                badnamecount++;
                SoftAssert.Assert(() => Assert.IsTrue(false), "ExtraTimer", "Extra timer present: " + pt.TimerName);
                break;
        }
    }

    if (badnamecount != 0)
    {
        // We would have logged any extra timers, so pass the ExtraTimer assert
        SoftAssert.Assert(() => Assert.IsTrue(true), "ExtraTimer");
    }

    SoftAssert.FailTestIfAssertFailed();
}