Example #1
		/// <summary>
		/// Returns the most recent logs as a single string. The console stores a maximum of the 25 most recent logs.
		/// </summary>
		/// <returns>The latest logs.</returns>
		/// <param name="numberOfMostRecentLogsToReturn">Number of most recent logs to return.</param>
		public static string ReturnLatestLogs(int numberOfMostRecentLogsToReturn) {
			
			List<Log> returnList = new List<Log>();
			if(Logs.Count <= numberOfMostRecentLogsToReturn) {
				
				returnList = Logs;          //Fewer logs exist than were requested; return them all.

			} else {
				
				for(int i = 1; i <= numberOfMostRecentLogsToReturn; i++) {
					
					//Removals below shrink the Logs list, so stop before the index would fall out of range.
					if(Logs.Count - i < 0) {
						
						break;

					}

					Log thisLog = Logs[Logs.Count - i];
					if(!string.IsNullOrEmpty(thisLog.message)) {
						
						returnList.Add(thisLog);

					} else {
						
						Logs.RemoveAt(Logs.Count - i);
						i--;

					}

				}

			}
			StringBuilder logString = new StringBuilder();
			for(int i = 0; i < returnList.Count; i++) {
				
				logString.AppendLine(string.Format("(LOG ENTRY) MESSAGE: {0} ** STACKTRACE: {1} ** TYPE: {2}", returnList[i].message, returnList[i].stackTrace, returnList[i].type.ToString()));

			}
			//Encode AutomationMaster.DELIMITER character or errors will occur in data parsing on the server.
			string result = logString.Length > MAX_LOG_RETURN_LENGTH ? logString.ToString().Substring(0, MAX_LOG_RETURN_LENGTH) : logString.ToString();
			return AutomationReport.EncodeCharactersForJson(result).Replace(AutomationMaster.DELIMITER.ToString(), "%7C");

		}
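
A minimal usage sketch (the caller below is illustrative; ReturnLatestLogs itself, the AutoConsole class it appears on, and the ErrorDetails field are taken from the call sites in Example #6):

		//Hypothetical caller: append a bounded slice of recent logs to a failure report,
		//mirroring the AutoConsole.ReturnLatestLogs(5) call made in Example #6.
		string recentLogs = AutoConsole.ReturnLatestLogs(5);
		AutomationMaster.CurrentTestContext.ErrorDetails += string.Format(" Debug Logs [{0}]", recentLogs);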
Example #2
            public void Add(AutoConsole.Log error)
            {
                GameException ex = new GameException();

                ex.ScreenshotName     = string.Format("EXCEPTION_{0}", Reported.Count);
                AutomationMaster.StaticSelfComponent.TakeScreenshotAsync(false, ex.ScreenshotName);
                ex.TimeStamp          = System.DateTime.UtcNow.ToLongDateString();
                ex.TestExecutionTime  = System.DateTime.UtcNow.Subtract(AutomationMaster.CurrentTestContext.StartTime).TotalSeconds.ToString();
                ex.CurrentRunningTest = AutomationMaster.CurrentTestContext.TestName;
                ex.Error        = AutomationReport.EncodeCharactersForJson(error.message);
                ex.ErrorDetails = AutomationReport.EncodeCharactersForJson(error.stackTrace);
                ex.Occurrences  = 1;
                for (int r = 0; r < Reported.Count; r++)
                {
                    if (Reported[r].Error == ex.Error && Reported[r].ErrorDetails == ex.ErrorDetails)
                    {
                        Reported[r].Occurrences++;
                        return;
                    }
                }
                _reported.Add(ex);
            }
Example #3
            public void Add(AutoConsole.Log error)
            {
                GameException ex = new GameException();

                ex.ScreenshotName     = string.Format("EXCEPTION_{0}", Reported.Count);
                ex.TimeStamp          = System.DateTime.UtcNow.ToLongDateString();
                ex.TestExecutionTime  = System.DateTime.UtcNow.Subtract(AutomationMaster.CurrentTestContext.StartTime).TotalSeconds.ToString();
                ex.CurrentRunningTest = AutomationMaster.CurrentTestContext.TestName;
                ex.Error        = AutomationReport.EncodeCharactersForJson(error.message).Replace(AutomationMaster.DELIMITER.ToString(), "%7C");          //Encode AutomationMaster.DELIMITER character or errors will occur in data parsing on the server.
                ex.ErrorDetails = AutomationReport.EncodeCharactersForJson(error.stackTrace).Replace(AutomationMaster.DELIMITER.ToString(), "%7C");       //Encode AutomationMaster.DELIMITER character or errors will occur in data parsing on the server.
                ex.Occurrences  = 1;
                for (int r = 0; r < Reported.Count; r++)
                {
                    if (Reported[r].Error == ex.Error && Reported[r].ErrorDetails == ex.ErrorDetails)
                    {
                        Reported[r].Occurrences++;
                        return;
                    }
                }
                AutomationMaster.StaticSelfComponent.TakeScreenshotAsync(false, ex.ScreenshotName);                 //Only take a screenshot if it is not a duplicate. Spammed errors would lead to spammed screenshots.
                _reported.Add(ex);
            }
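
The only change from Example #2 is that TakeScreenshotAsync now runs after the duplicate check, so a spammed error increments Occurrences without generating a screenshot per repetition. A hedged sketch of how a Unity log callback (e.g. one registered via Application.logMessageReceived) might feed Add; the handler and the exceptions field are illustrative assumptions, while Add(AutoConsole.Log) and the Log fields come from the examples:

            //Hypothetical Unity log hook; only Add(AutoConsole.Log) and the Log fields
            //(message, stackTrace, type) appear in the examples above.
            void HandleLog(string message, string stackTrace, LogType type)
            {
                if (type == LogType.Exception || type == LogType.Error)
                {
                    AutoConsole.Log log = new AutoConsole.Log();                 //Construction via a parameterless constructor is an assumption.
                    log.message    = message;
                    log.stackTrace = stackTrace;
                    log.type       = type;
                    exceptions.Add(log);                 //"exceptions" assumed to be an instance of the containing class.
                }
            }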
Example #4
 public void AddTestCaseAssertionOnFailure(string newAssertion)
 {
     //Add test case assertion before the last failed assertion. Insert (rather than AddAt, which List<string>
     //does not define) places it at that index; Math.Max guards against a negative index when fewer than two assertions exist.
     Assertions.Insert(Math.Max(0, Assertions.Count - 2), AutomationReport.EncodeCharactersForJson(newAssertion));
 }
Example #5
 public void AddAssertion(string newAssertion)
 {
     Assertions.Add(AutomationReport.EncodeCharactersForJson(newAssertion));
 }
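
Both helpers funnel every assertion through AutomationReport.EncodeCharactersForJson before storing it, so report payloads stay JSON-safe at the point of entry. Their use is visible in Example #6 below; these calls are taken verbatim from the Unifier coroutine:

 //Calls as they appear in Example #6 (the Unifier coroutine).
 AutomationMaster.CurrentTestContext.AddAssertion(message);
 AutomationMaster.CurrentTestContext.AddAssertion(string.Format("**TRY_FAIL**{0}", message));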
Example #6
        protected IEnumerator Unifier(bool b, bool inverse, string message, FailureContext newFailureContext, params int[] testRailsId)
        {
            //If test was already marked as a failure, and test has flag indicating that it should continue despite failure, ignore.
            if (!AutomationMaster.CurrentTestContext.IsSuccess || ((AutomationMaster.TryContinueOnFailure || MideExecution_MarkTestToTryContinueAfterFail) && IsFailing))
            {
                UnitTestStepFailure = isSoft = isTry = quiet = false;                 //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
                yield break;
            }

            //Automatically label this assertion as quiet if the previous assertion is identical to this one.
            quiet = quiet || (AutomationMaster.CurrentTestContext.Assertions.Count > 0 && AutomationMaster.CurrentTestContext.Assertions.Last() == message);

            _failureContext = newFailureContext;
            if ((!b && inverse) || (b && !inverse))
            {
                if (isTry && !quiet)
                {
                    AutomationMaster.CurrentTestContext.AddAssertion(string.Format("**TRY_SUCCESS**{0}", message));
                    UnitTestStepFailure = isSoft = isTry = quiet = false;                     //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
                    yield break;
                }

                ConcurrentFailures = 0;
                AutomationMaster.CurrentTestContext.IsSuccess = true;
                if (!string.IsNullOrEmpty(message) && !isTry && !quiet)
                {
                    AutomationMaster.CurrentTestContext.AddAssertion(message);
                }
            }
            else
            {
                //TODO: UnitTestStepFailure - Determine if an assertion has failed within the context of a TestObject "steps" method. If so, set this to true. Used to disable certain TestRunner reactive logic, such as screenshots.

                if (isTry)
                {
                    if (!quiet)
                    {
                        AutomationMaster.CurrentTestContext.AddAssertion(string.Format("**TRY_FAIL**{0}", message));
                    }
                    UnitTestStepFailure = isSoft = isTry = quiet = false;                     //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
                    yield break;
                }

                IsFailing = true;
                bool recordLogDetails = newFailureContext != FailureContext.Skipped;

                SetReflectedTestData();
                string recentLogs = AutomationReport.EncodeCharactersForJson(AutoConsole.ReturnLatestLogs(5));

                if (newFailureContext == FailureContext.Skipped)
                {
                    AutomationMaster.TestRunContext.Skipped.Add(AutomationMaster.CurrentTestContext.TestName);
                }
                else
                {
                    AutomationMaster.TestRunContext.Failed.Add(AutomationMaster.CurrentTestContext.TestName, new string[] {
                        message,
                        recentLogs,
                        lineNumber
                    });
                }

                AutomationMaster.CurrentTestContext.IsSuccess = false;
                AutomationMaster.CurrentTestContext.AddAssertion(message);
                AutomationMaster.CurrentTestContext.ErrorDetails += string.Format("Error Message [{0}] : Test Line [{1}] : Debug Logs [{2}] ", message, string.Format("Line [{0}] Call [{1}]", lineNumber, lineCall), (recordLogDetails ? recentLogs : string.Format("#SKIPPED#{0}", message)));
                AutomationMaster.CurrentTestContext.ErrorDetails += string.Format(" FULL STACK: [{0}]", Environment.StackTrace.Replace(" at", string.Format(" {0} at", AutomationMaster.NEW_LINE_INDICATOR)));
                if (failureContext != FailureContext.Skipped)
                {
                    //Take screenshot if a failure is not a "Skip" failure (In which case a test does not run at all, and there is no value in taking a screenshot as the current screen has no relevance to the reason it failed).
                    yield return(StartCoroutine(AutomationMaster.StaticSelfComponent.TakeScreenshot()));

                    screenshotRequestTime = DateTime.UtcNow;
                }

                //Handle errors occurring outside of the context of the current test's execution. Only certain contexts require additional handling over what is offered by default.
                switch (AutomationMaster.ExecutionContext)
                {
                case AutomationMaster.CurrentExecutionContext.SetUpClass:
                    AutomationMaster.AutoSkips.Add(new KeyValuePair <string[], string>(new string[] { "class", AutomationMaster.CurrentTestContext.ClassName }, string.Format("FAILURE OCCURRED IN SETUPCLASS: {0}", message)));
                    break;

                case AutomationMaster.CurrentExecutionContext.SetUp:
                    AutomationMaster.AutoSkips.Add(new KeyValuePair <string[], string>(new string[] { "test", AutomationMaster.CurrentTestContext.TestName }, string.Format("FAILURE OCCURRED IN SETUP: {0}", message)));
                    break;

                case AutomationMaster.CurrentExecutionContext.TearDownClass:
                    yield return(StartCoroutine(Q.assert.Warn(string.Format("A failure occurred in the TearDownClass logic for the test \"{0}.{1}\". This fails the last-run test, and may cause other undesirable behavior for downstream test execution.", AutomationMaster.CurrentTestContext.ClassName, AutomationMaster.CurrentTestContext.TestName))));

                    //Will automatically handle the failure of this test.
                    break;

                case AutomationMaster.CurrentExecutionContext.TearDown:
                //Will automatically handle the failure of this test.
                case AutomationMaster.CurrentExecutionContext.Test:
                //Will automatically handle the failure of this test.
                default:
                    break;
                }

                if ((AutomationMaster.TryContinueOnFailure || MideExecution_MarkTestToTryContinueAfterFail) && ConcurrentFailures > 5)
                {
                    AutomationMaster.OverrideContinueOnFailureAfterTooManyConcurrentFailures = true;
                }

                #if UNITY_EDITOR
                AutomationMaster.PauseEditorOnFailure();
                #endif

                //Any FailureContext beyond TestMethod will not have an instantiated test method.
                if (!AutomationMaster.TryContinueOnFailure)
                {
                    if ((!isSoft && AutomationMaster.OverrideContinueOnFailureAfterTooManyConcurrentFailures) || (!MideExecution_MarkTestToTryContinueAfterFail && (_failureContext == FailureContext.TestMethod || _failureContext == FailureContext.Default) && failureContext != FailureContext.Skipped))
                    {
                        try {
                            AutomationMaster.CurrentTestMethod.Stop();                //Kill current test, only if the currently queued test has been initialized.
                        } catch { }
                        yield return(new WaitForEndOfFrame());                        //Allow all Coroutines to be stopped before returning control. In reality, the coroutine calling this will be stopped, so control will never be returned anyway.
                    }
                }

                if (!isSoft && (AutomationMaster.TryContinueOnFailure || MideExecution_MarkTestToTryContinueAfterFail))
                {
                    ConcurrentFailures++;
                }
            }

            if (testRailsId.Length > 0)
            {
                AutomationReport.MarkTestRailsTestCase(AutomationMaster.CurrentTestContext.IsSuccess ? "Passed" : "Failed", testRailsId);
            }

            AutoConsole.PostMessage(string.Format("Assert [{0}] |{1}| {2}", AutomationMaster.CurrentTestContext.TestName, AutomationMaster.CurrentTestContext.IsSuccess ? "Success" : "Failure", message), MessageLevel.Verbose, ConsoleMessageType.TestRunnerUpdate);
            UnitTestStepFailure = isSoft = isTry = quiet = false;             //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
            yield return(null);
        }
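
Unifier is protected, so public assertion entry points presumably wrap it. A minimal sketch of one such wrapper; the method name IsTrue and the defaulting are assumptions, while Unifier's signature, FailureContext.Default, and the StartCoroutine pattern come from the example above:

        //Hypothetical public wrapper; the name and defaults are assumptions, not part of the example.
        //With inverse set to false, Unifier treats a true condition as a passing assertion.
        public IEnumerator IsTrue(bool condition, string message, params int[] testRailsId)
        {
            yield return StartCoroutine(Unifier(condition, false, message, FailureContext.Default, testRailsId));
        }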