Example #1
 public void MarkTestRailsTestCase(bool isSuccess, params int[] testCaseIds)
 {
     if (!IsFailing)
     {
         AutomationReport.MarkTestRailsTestCase(isSuccess ? "Passed" : "Failed", testCaseIds);
     }
 }
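A minimal usage sketch for Example #1, assuming it is called from within a test class that tracks IsFailing; the TestRails case IDs shown are placeholders:

 //Reports the listed TestRails cases as "Passed" (or "Failed" when isSuccess is false), unless the test is already failing.
 MarkTestRailsTestCase(true, 1001, 1002);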
Example #2
        protected IEnumerator Unifier(bool b, bool inverse, string message, FailureContext newFailureContext, params int[] testRailsId)
        {
            //If the test was already marked as a failure, and it has a flag indicating that it should continue despite failure, ignore this assertion.
            if (!AutomationMaster.CurrentTestContext.IsSuccess || ((AutomationMaster.TryContinueOnFailure || MideExecution_MarkTestToTryContinueAfterFail) && IsFailing))
            {
                UnitTestStepFailure = isSoft = isTry = quiet = false;                 //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
                yield break;
            }

            //Automatically label this assertion as quiet if the previous assertion is identical to this one.
            quiet = quiet || AutomationMaster.CurrentTestContext.Assertions.Last() == message;

            _failureContext = newFailureContext;
            if ((!b && inverse) || (b && !inverse))
            {
                if (isTry && !quiet)
                {
                    AutomationMaster.CurrentTestContext.AddAssertion(string.Format("**TRY_SUCCESS**{0}", message));
                    UnitTestStepFailure = isSoft = isTry = quiet = false;                     //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
                    yield break;
                }

                ConcurrentFailures = 0;
                AutomationMaster.CurrentTestContext.IsSuccess = true;
                if (!string.IsNullOrEmpty(message) && !isTry && !quiet)
                {
                    AutomationMaster.CurrentTestContext.AddAssertion(message);
                }
            }
            else
            {
                //TODO: UnitTestStepFailure - Determine if an assertion has failed within the context of a TestObject "steps" method. If so, set this to true. Used to disable certain TestRunner reactive logic, such as screenshots.

                if (isTry)
                {
                    if (!quiet)
                    {
                        AutomationMaster.CurrentTestContext.AddAssertion(string.Format("**TRY_FAIL**{0}", message));
                    }
                    UnitTestStepFailure = isSoft = isTry = quiet = false;                     //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
                    yield break;
                }

                IsFailing = true;
                bool recordLogDetails = newFailureContext != FailureContext.Skipped;

                SetReflectedTestData(); //Populates the reflected lineNumber and lineCall values used in the failure details below.
                string recentLogs = AutomationReport.EncodeCharactersForJson(AutoConsole.ReturnLatestLogs(5));

                if (newFailureContext == FailureContext.Skipped)
                {
                    AutomationMaster.TestRunContext.Skipped.Add(AutomationMaster.CurrentTestContext.TestName);
                }
                else
                {
                    AutomationMaster.TestRunContext.Failed.Add(AutomationMaster.CurrentTestContext.TestName, new string[] {
                        message,
                        recentLogs,
                        lineNumber
                    });
                }

                AutomationMaster.CurrentTestContext.IsSuccess = false;
                AutomationMaster.CurrentTestContext.AddAssertion(message);
                AutomationMaster.CurrentTestContext.ErrorDetails += string.Format("Error Message [{0}] : Test Line [{1}] : Debug Logs [{2}] ", message, string.Format("Line [{0}] Call [{1}]", lineNumber, lineCall), (recordLogDetails ? recentLogs : string.Format("#SKIPPED#{0}", message)));
                AutomationMaster.CurrentTestContext.ErrorDetails += string.Format(" FULL STACK: [{0}]", Environment.StackTrace.Replace(" at", string.Format(" {0} at", AutomationMaster.NEW_LINE_INDICATOR)));
                if (failureContext != FailureContext.Skipped)
                {
                    //Take a screenshot unless this is a "Skip" failure (in which case the test does not run at all, so the current screen has no relevance to the reason it failed).
                    yield return(StartCoroutine(AutomationMaster.StaticSelfComponent.TakeScreenshot()));

                    screenshotRequestTime = DateTime.UtcNow;
                }

                //Handle errors occurring outside of the context of the current test's execution. Only certain contexts require additional handling over what is offered by default.
                switch (AutomationMaster.ExecutionContext)
                {
                case AutomationMaster.CurrentExecutionContext.SetUpClass:
                    AutomationMaster.AutoSkips.Add(new KeyValuePair <string[], string>(new string[] { "class", AutomationMaster.CurrentTestContext.ClassName }, string.Format("FAILURE OCCURRED IN SETUPCLASS:", message)));
                    break;

                case AutomationMaster.CurrentExecutionContext.SetUp:
                    AutomationMaster.AutoSkips.Add(new KeyValuePair <string[], string>(new string[] { "test", AutomationMaster.CurrentTestContext.TestName }, string.Format("FAILURE OCCURRED IN SETUP:", message)));
                    break;

                case AutomationMaster.CurrentExecutionContext.TearDownClass:
                    yield return(StartCoroutine(Q.assert.Warn(string.Format("A failure occurred in the TearDownClass logic for the test \"{0}.{1}\". This fails the last-run test, and may cause other undesirable behavior for downstream test execution.", AutomationMaster.CurrentTestContext.ClassName, AutomationMaster.CurrentTestContext.TestName))));

                    //Will automatically handle the failure of this test.
                    break;

                case AutomationMaster.CurrentExecutionContext.TearDown:
                //Will automatically handle the failure of this test.
                case AutomationMaster.CurrentExecutionContext.Test:
                //Will automatically handle the failure of this test.
                default:
                    break;
                }

                if ((AutomationMaster.TryContinueOnFailure || MideExecution_MarkTestToTryContinueAfterFail) && ConcurrentFailures > 5)
                {
                    AutomationMaster.OverrideContinueOnFailureAfterTooManyConcurrentFailures = true;
                }

                #if UNITY_EDITOR
                AutomationMaster.PauseEditorOnFailure();
                #endif

                //Any FailureContext beyond TestMethod will not have an instantiated test method.
                if (!AutomationMaster.TryContinueOnFailure)
                {
                    if ((!isSoft && AutomationMaster.OverrideContinueOnFailureAfterTooManyConcurrentFailures) || (!MideExecution_MarkTestToTryContinueAfterFail && (_failureContext == FailureContext.TestMethod || _failureContext == FailureContext.Default) && failureContext != FailureContext.Skipped))
                    {
                        try {
                            AutomationMaster.CurrentTestMethod.Stop();                //Kill current test, only if the currently queued test has been initialized.
                        } catch { }
                        yield return(new WaitForEndOfFrame());                        //Allow all Coroutines to be stopped before returning control. In reality, the coroutine calling this will be stopped, so control will never be returned anyway.
                    }
                }

                if (!isSoft && (AutomationMaster.TryContinueOnFailure || MideExecution_MarkTestToTryContinueAfterFail))
                {
                    ConcurrentFailures++;
                }
            }

            if (testRailsId.Length > 0)
            {
                AutomationReport.MarkTestRailsTestCase(AutomationMaster.CurrentTestContext.IsSuccess ? "Passed" : "Failed", testRailsId);
            }

            AutoConsole.PostMessage(string.Format("Assert [{0}] |{1}| {2}", AutomationMaster.CurrentTestContext.TestName, AutomationMaster.CurrentTestContext.IsSuccess ? "Success" : "Failure", message), MessageLevel.Verbose, ConsoleMessageType.TestRunnerUpdate);
            UnitTestStepFailure = isSoft = isTry = quiet = false;             //Reset the UnitTestStepFailure, Soft, Try, and Quiet flags.
            yield return(null);
        }
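A hedged sketch of how an assertion wrapper might funnel into Unifier (Example #2); the IsTrue method shown here is an assumption for illustration, not a confirmed part of the framework's API:

        //Hypothetical wrapper: passes the raw condition through with inverse = false,
        //so a false condition is recorded as a failure and reported to TestRails if IDs are supplied.
        public IEnumerator IsTrue(bool condition, string message, params int[] testRailsId)
        {
            yield return StartCoroutine(Unifier(condition, false, message, FailureContext.Default, testRailsId));
        }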