// Example #1
 // Emits a progress dot to the console for every test case node created.
 private static void OnAfterCreateNode(ITestNode node)
 {
     // Use a direct type test instead of reflecting over GetInterfaces():
     // clearer, and avoids per-node reflection cost. Equivalent behavior,
     // since ITestCase is an interface and 'is' matches any implementer.
     if (node is ITestCase)
     {
         Console.Write(".");
     }
 }
// Example #2
        // Writes a colored PASS/FAIL line for one test to the console, followed by
        // the assertion counts and the elapsed time.
        private static void WriteTestResult(ITestNode testNode, IFixtureResult result)
        {
            // The initial values were dead stores in the original (every branch
            // reassigned both variables), so declare without initializers; the
            // compiler verifies definite assignment across the branches.
            ConsoleText.ColorCodes cColor;
            string status;

            // Pass only when every assertion was right (no wrongs, no exceptions).
            if (result.Right == (result.Right + result.Wrong + result.Exceptions))
            {
                cColor = ConsoleText.ColorCodes.Green;
                status = " PASS! ";
            }
            else if ((result.Exceptions != 0) && (result.Wrong == 0))
            {
                // Exceptions only (no failed assertions): highlight in yellow.
                cColor = ConsoleText.ColorCodes.Yellow;
                status = " FAIL! ";
            }
            else
            {
                cColor = ConsoleText.ColorCodes.Red;
                status = " FAIL! ";
            }

            ConsoleText.SetColor(cColor);
            Console.Write(status);
            ConsoleText.SetColor(ConsoleText.DEFAULT_COLOR);
            if (testNode != null)
            {
                Console.Write(testNode.Name + "\t");
            }
            Console.Write("Assertions:\t Pass:{0} \t Fail:{1} \t Exception:{2} \t {3}", result.Right, result.Wrong,
                          result.Exceptions, result.GetTimeSpan().TotalSeconds + "sec");
        }
        // Records a relationship of the given stereotype between the model nodes
        // that underlie the two test nodes.
        private void AddRelationship(ITestNode node, ITestNode baseNode, ModelRelationshipStereotype stereotype)
        {
            // Relationships are stored between the underlying nodes, not the wrappers.
            var sourceId = ModelService.GetUnderlyingNode(node).Id;
            var targetId = ModelService.GetUnderlyingNode(baseNode).Id;

            ModelService.AddRelationship(sourceId, targetId, stereotype);
        }
// Example #4
 // Raises the OnAfterCreateNode event for the newly created node, if anyone listens.
 private void NotifyOnAfterCreateNode(ITestNode node)
 {
     // Null-conditional invoke captures the delegate once, avoiding the
     // check-then-invoke race present in the explicit null test.
     OnAfterCreateNode?.Invoke(node);
 }
// Example #5
        /// <summary>
        /// Create the list of tests specified by the context.
        /// </summary>
        /// <param name="Context">Context describing the requested tests, namespaces and build info</param>
        /// <param name="DefaultParams">Global parameters merged into every test's own parameters</param>
        /// <param name="PlatformParams">Platform argument the tests are created for.
        /// NOTE(review): despite the null default this is dereferenced unconditionally below —
        /// confirm all callers pass a value.</param>
        /// <returns>The constructed test nodes</returns>
        IEnumerable <ITestNode> CreateTestList(UnrealTestContext Context, Params DefaultParams, ArgumentWithParams PlatformParams = null)
        {
            List <ITestNode> NodeList = new List <ITestNode>();

            IEnumerable <string> Namespaces = Context.Options.Namespaces.Split(',').Select(S => S.Trim());

            UnrealTargetPlatform UnrealPlatform = UnrealTargetPlatform.Unknown;

            if (!Enum.TryParse(PlatformParams.Argument, true, out UnrealPlatform))
            {
                throw new AutomationException("Could not convert platform {0} to a valid UnrealTargetPlatform", PlatformParams.Argument);
            }

            // Create an instance of each test and add it to the executor
            foreach (var Test in Context.Options.TestList)
            {
                // create a copy of the context for this test
                UnrealTestContext TestContext = (UnrealTestContext)Context.Clone();

                // if test specifies platforms, filter for this context
                if (Test.Platforms.Any() && !Test.Platforms.Any(Plat => Plat.Argument == PlatformParams.Argument))
                {
                    continue;
                }

                if (Blacklist.Instance.IsTestBlacklisted(Test.TestName, UnrealPlatform, TestContext.BuildInfo.Branch))
                {
                    Gauntlet.Log.Info("Test {0} is currently blacklisted on {1} in branch {2}", Test.TestName, UnrealPlatform, TestContext.BuildInfo.Branch);
                    continue;
                }

                // combine global and test-specific params
                Params CombinedParams = new Params(DefaultParams.AllArguments.Concat(Test.TestParams.AllArguments).ToArray());

                // parse any target constraints; the last PerfSpec value wins
                List <string> PerfSpecArgs = CombinedParams.ParseValues("PerfSpec", false);
                string        PerfSpecArg  = PerfSpecArgs.Count > 0 ? PerfSpecArgs.Last() : "Unspecified";
                EPerfSpec     PerfSpec;
                if (!Enum.TryParse <EPerfSpec>(PerfSpecArg, true, out PerfSpec))
                {
                    // BUG FIX: the original formatted the (unassigned) PerfSpec enum here
                    // instead of the offending string argument.
                    throw new AutomationException("Unable to convert perfspec '{0}' into an EPerfSpec", PerfSpecArg);
                }

                TestContext.Constraint = new UnrealTargetConstraint(UnrealPlatform, PerfSpec);

                TestContext.TestParams = CombinedParams;

                // This will throw if the test cannot be created
                ITestNode NewTest = Utils.TestConstructor.ConstructTest <ITestNode, UnrealTestContext>(Test.TestName, TestContext, Namespaces);

                NodeList.Add(NewTest);
            }

            return NodeList;
        }
        // Builds the full path of a node: "<root>\...\<parent>\<node name>".
        public static string GetFullPath(ITestNode node)
        {
            var path = string.Empty;

            // Collect the ancestor chain as a backslash-separated prefix,
            // then append the node's own name.
            FindParentPath(node, ref path);
            return path + node.Name;
        }
// Example #7
        // Moves the accumulated text in 'sb' into the fixture registered under
        // 'tableId' and attaches that fixture as a child of 'testNode'.
        private void CreateTextTable(int tableId, ITestNode testNode, ref StringBuilder sb)
        {
            // Fixtures are registered by the table id's string form.
            var textData = Services.Get <IFixtureTextData>(tableId + "");

            textData.ContentText = sb.ToString();
            sb.Clear();                          // reset the builder for the next table

            NotifyOnAfterCreateNode(textData);
            testNode.AddChildNode(textData);
        }
        // Adds a wrapper model node for 'node' (optionally parented under the
        // wrapper of 'parentNode') and records it in the current group.
        public void AddNode(ITestNode node, ITestNode parentNode = null)
        {
            var stereotype = GetNodeType(node);

            // Resolve the parent's wrapper id only when a parent was supplied.
            ModelNodeId? parentWrapperNodeId = null;
            if (parentNode != null)
            {
                parentWrapperNodeId = GetWrapperNode(parentNode).Id;
            }

            var wrapperNode = _modelService.AddNode(node.Name, stereotype, node, parentWrapperNodeId);
            AddItemToCurrentGroup(wrapperNode);
        }
 // Prepends the names of all ancestors of 'node' (nearest first, so the root
 // ends up leftmost) to 'parentPath', each followed by a backslash.
 public static void FindParentPath(ITestNode node, ref string parentPath)
 {
     // Iterative walk up the parent chain; equivalent to the recursive form.
     for (ITestNode current = node.ParentNode; current != null; current = current.ParentNode)
     {
         parentPath = parentPath.Insert(0, current.Name + "\\");
     }
 }
// Example #10
 // Returns the output HTML file path for a test node, e.g. "<OutputDir>\000001-Pass.html",
 // incrementing the running file counter as a side effect.
 private string GetOutputFile(ITestNode node)
 {
     // BUG FIX: the original only called CreateDirectory when the directory
     // ALREADY existed (inverted Exists check), so a missing output directory
     // was never created. CreateDirectory is a no-op when the directory exists,
     // so simply call it unconditionally.
     Directory.CreateDirectory(OutputDir);

     _runningNumber++;
     string runningFileName = _runningNumber.ToString().PadLeft(6, '0');
     string status = (node.TestResult.Pass) ? "Pass" : "Fail";
     return OutputDir + "\\" + runningFileName + "-" + status + ".html";
 }
// Example #11
        // Loads the node's backing file content (when the node redefines its
        // properties) and then parses it.
        public void LoadData(ITestNode node)
        {
            var redefined = node as IRedefineTestNodeProperties;

            if (redefined != null)
            {
                Services.Get <IUtilService>().ReadFromFile(
                    redefined.FullFileName, redefined.ContentList);
            }

            // NOTE(review): when the cast above fails, Parse receives null —
            // confirm Parse tolerates a null argument (behavior kept as-is).
            Parse(redefined);
        }
// Example #12
 // Prints the result line for a completed test and, when configured to stop on
 // failure, writes the report and exits the process with code 11.
 private static void OnTestComplete(ITestNode node)
 {
     WriteTestResult(node, node.TestResult);
     Console.WriteLine();

     bool hasFailures = node.TestResult.Wrong + node.TestResult.Exceptions != 0;
     if (_exitOnFail && hasFailures)
     {
         var addIn = node.Services.Get <ITestFrameworkAddin>();
         GenerateReport(addIn.GetReportWriter());
         Console.WriteLine("test run stopped on failure.");
         Environment.Exit(11);
     }
 }
 // Flattens the subtree below 'node' into 'list' in pre-order (the root itself
 // is not added). A null node is ignored.
 public static void GetAllChildren(ITestNode node, ref IList <ITestNode> list)
 {
     if (node == null)
     {
         return;
     }

     foreach (ITestNode child in node)
     {
         list.Add(child);
         GetAllChildren(child, ref list);
     }
 }
 // Recomputes LevelNo for every node below 'node': each child is one level
 // deeper than its parent. A null node is ignored.
 private static void ApplyNodeLevel(ITestNode node)
 {
     if (node == null)
     {
         return;
     }

     foreach (ITestNode child in node)
     {
         child.LevelNo = child.ParentNode.LevelNo + 1;
         ApplyNodeLevel(child);
     }
 }
// Example #15
 // Reports a node-processing problem on the console: include-script links are
 // warnings (yellow), everything else is an error (red).
 private static void OnErrorProcessingNodes(ITestNode node)
 {
     bool isIncludeLink = node is ITestLink;

     ConsoleText.SetColor(isIncludeLink ? ConsoleText.ColorCodes.Yellow : ConsoleText.ColorCodes.Red);
     Console.WriteLine();

     if (isIncludeLink)
     {
         Console.Write("Warning: processing include scripts: [{0}]:[{1}]", node.ParentNode, node);
     }
     else
     {
         Console.Write("Unidentified error while processing node " + node);
     }

     ConsoleText.SetColor(ConsoleText.DEFAULT_COLOR);
 }
        // Maps a test-node implementation type onto its model stereotype.
        // Throws for any type that has no stereotype mapping (including null).
        private static ModelNodeStereotype GetNodeType(ITestNode node)
        {
            if (node is ClassNode)
            {
                return ModelNodeStereotypes.Class;
            }
            if (node is InterfaceNode)
            {
                return ModelNodeStereotypes.Interface;
            }
            if (node is PropertyNode)
            {
                return ModelNodeStereotypes.Property;
            }

            throw new Exception($"Unexpected node type {node?.GetType().Name}");
        }
// Example #17
 // Collects "!path" and "!define" variable declarations from every text-data
 // child of 'node', then walks up the parent chain and repeats. Each variable
 // is added to 'varList' as "name=value", skipping duplicates.
 public void GetVariables(ITestNode node, ref IList <string> varList)
 {
     // Recursion terminates at the tree root (ParentNode == null).
     if (node == null)
     {
         return;
     }
     foreach (ITestNode selectedNode in node)
     {
         if ((selectedNode as IFixtureTextData) != null)
         {
             var      fixtureTextdata = selectedNode as IFixtureTextData;
             // Split on '\r'; leftover '\n' characters are stripped per-line below.
             string[] lines           = fixtureTextdata.ContentText.Split('\r');
             foreach (string s in lines)
             {
                 string line = s;
                 if (line.ToLower().Contains("!path"))
                 {
                     line = line.Replace("\n", "");
                     line = line.Replace("\r", "").Trim();
                     // Substring(6) skips "!path " — assumes the directive starts the
                     // trimmed line; NOTE(review): a mid-line "!path" would garble this.
                     string valueToAdd = "path=" + line.Substring(6).Trim();
                     if (varList.IndexOf(valueToAdd) == -1)
                     {
                         varList.Add(valueToAdd);
                     }
                 }
                 if (line.ToLower().Contains("!define"))
                 {
                     // Strip newlines and the brace delimiters around the value.
                     line = line.Replace("\n", "");
                     line = line.Replace("\r", "");
                     line = line.Replace("{", "");
                     line = line.Replace("}", "");
                     // Substring(8) skips "!define " to leave "name value".
                     line = line.Substring(8).Trim();
                     int    firstSpace = line.IndexOf(' ');
                     // NOTE(review): firstSpace is -1 when the define has no value,
                     // which makes Substring throw — confirm inputs always have one.
                     string firstPart  = line.Substring(0, firstSpace).Trim();
                     // NOTE(review): Replace removes EVERY occurrence of the name,
                     // so a value containing the name text is mangled.
                     string secondPart = line.Replace(firstPart, "").Trim();
                     string valueToAdd = firstPart + "=" + secondPart;
                     if (varList.IndexOf(valueToAdd) == -1)
                     {
                         varList.Add(valueToAdd);
                     }
                 }
             }
         }
     }
     // Continue collecting from ancestors so inherited variables are included.
     GetVariables(node.ParentNode, ref varList);
 }
// Example #18
        // Recursively determines whether the subtree below 'node' contains anything
        // runnable: a runnable suite or any test case. Short-circuits once 'result'
        // is already true.
        private static void ContainsRunnableItems(ITestNode node, ref bool result)
        {
            if (node == null || result)
            {
                return;
            }

            foreach (ITestNode child in node)
            {
                if (child is ISuite suite)
                {
                    result |= suite.IsRunnable;
                }
                else if (child is ITestCase)
                {
                    result = true;
                }

                ContainsRunnableItems(child, ref result);
            }
        }
// Example #19
 // Attaches 'node' as a child by linking the persistence layers, then returns
 // the node so calls can be chained.
 public ITestNode AddChildNode(ITestNode node)
 {
     PersistanceNode.AddChildNode(node.PersistanceNode);
     return node;
 }
 // Creates an enumerator over the children of the given test node.
 public TreeChildEnumerator(ITestNode node) => _node = node;
 // Wraps a test node with execution bookkeeping. All timestamps start unset
 // (DateTime.MinValue) and are filled in as the test progresses.
 public TestExecutionInfo(ITestNode InNode)
 {
     TestNode            = InNode;
     CancellationReason  = "";
     FirstReadyCheckTime = DateTime.MinValue;
     PreStartTime        = DateTime.MinValue;
     PostStartTime       = DateTime.MinValue;
     EndTime             = DateTime.MinValue;
 }
        /// <summary>
        /// Executes the provided tests. Currently tests are executed synchronously
        /// </summary>
        /// <param name="InOptions">Executor options: parallelism, loop count, wait limit, stop-on-error</param>
        /// <param name="RequiredTests">The tests to execute</param>
        /// <returns>True when every pass completed without failures and the run was not cancelled</returns>
        public bool ExecuteTests(TestExecutorOptions InOptions, IEnumerable <ITestNode> RequiredTests)
        {
            Options = InOptions;

            Log.Info("Preparing to start {0} automation test(s)", RequiredTests.Count());

            // install a cancel handler so we can stop parallel-for gracefully
            Action CancelHandler = delegate()
            {
                Log.Info("Cancelling Tests");
                IsCancelled = true;
            };

            Action PostCancelHandler = delegate()
            {
                HaveReceivedPostAbort = true;
            };

            Globals.AbortHandlers.Add(CancelHandler);
            Globals.PostAbortHandlers.Add(PostCancelHandler);

            StartTime       = DateTime.Now;
            CurrentTestPass = 0;

            IsRunning = true;

            // 0-based indices of passes that ended with at least one failed test
            List <int> FailedPassList = new List <int>();

            // high-water marks, reported for diagnostics when running in parallel
            int MaxParallelTasks = 0;
            int MaxStartingTasks = 0;

            // sort by priority
            if (Options.Parallel > 1)
            {
                RequiredTests = RequiredTests.OrderBy(Node => Node.Priority);
            }

            for (CurrentTestPass = 0; CurrentTestPass < Options.TestLoops; CurrentTestPass++)
            {
                // do not start a pass if cancelled
                if (IsCancelled)
                {
                    break;
                }

                if (CurrentTestPass > 0)
                {
                    // if repeating tests wait a little bit. If there was a crash CR might still be
                    // processing things.
                    Thread.Sleep(10000);
                }

                DateTime StartPassTime = DateTime.Now;

                Log.Info("Starting test pass {0} of {1}", CurrentTestPass + 1, Options.TestLoops);

                // Tests that we want to run
                List <TestExecutionInfo> PendingTests = RequiredTests.Select(N => new TestExecutionInfo(N)).ToList();

                // Tests that are in the process of starting
                List <TestExecutionInfo> StartingTests = new List <TestExecutionInfo>();

                List <Thread> StartingTestThreads = new List <Thread>();

                // Tests that have been started and we're ticking/checking
                List <TestExecutionInfo> RunningTests = new List <TestExecutionInfo>();

                // Completed tests
                List <TestExecutionInfo> CompletedTests = new List <TestExecutionInfo>();

                DateTime LastUpdateMsg        = DateTime.MinValue;
                DateTime LastReadyCheck       = DateTime.MinValue;
                DateTime LastStatusUpdateTime = DateTime.MinValue;

                const double ReadyCheckPeriod   = 30.0;
                const double StatusUpdatePeriod = 60.0;

                // Main scheduling loop: start ready tests, tick running ones, until
                // everything completed or the run is cancelled.
                while (CompletedTests.Count() < RequiredTests.Count() && IsCancelled == false)
                {
                    Monitor.Enter(Globals.MainLock);

                    int SecondsRunning = (int)(DateTime.Now - StartPassTime).TotalSeconds;

                    int InProgressCount = RunningTests.Count() + StartingTests.Count();

                    double TimeSinceLastReadyCheck = (DateTime.Now - LastReadyCheck).TotalSeconds;

                    // Are any tests ready to run?
                    if (InProgressCount < Options.Parallel &&
                        PendingTests.Count() > 0 &&
                        TimeSinceLastReadyCheck >= ReadyCheckPeriod)
                    {
                        TestExecutionInfo TestToStart = null;

                        List <ITestNode> TestsFailingToStart = new List <ITestNode>();

                        // find a node that can run, and
                        // find the first test that can run....
                        for (int i = 0; i < PendingTests.Count(); i++)
                        {
                            TestExecutionInfo NodeInfo = PendingTests[i];
                            ITestNode         Node     = NodeInfo.TestNode;

                            bool IsTestReady = false;

                            try
                            {
                                IsTestReady = Node.IsReadyToStart();
                            }
                            catch (System.Exception ex)
                            {
                                // A throwing ready check disqualifies the test for this pass.
                                Log.Error("Test {0} threw an exception during ready check. Ex: {1}", Node, ex);

                                PendingTests[i]       = null;
                                NodeInfo.PreStartTime = NodeInfo.PostStartTime = NodeInfo.EndTime = DateTime.Now;
                                CompletedTests.Add(NodeInfo);
                            }

                            if (IsTestReady)
                            {
                                // if ready then take it and stop looking
                                TestToStart = NodeInfo;

                                if (NodeInfo.FirstReadyCheckTime == DateTime.MinValue)
                                {
                                    NodeInfo.FirstReadyCheckTime = DateTime.Now;
                                }
                                break;
                            }
                            else
                            {
                                // track the time that this test should have been able to run due to no other tests
                                // consuming resources (at least locally...)
                                // TODO - how can tests express resource requirements in a generic way?
                                // TODO - what about the situation where no tests can run so all FirstCheck times are set, but
                                // then a test starts and consumes all resources?
                                if (RunningTests.Count() == 0 && StartingTests.Count() == 0)
                                {
                                    if (NodeInfo.FirstReadyCheckTime == DateTime.MinValue)
                                    {
                                        NodeInfo.FirstReadyCheckTime = DateTime.Now;
                                    }

                                    double TimeWaiting = (DateTime.Now - NodeInfo.FirstReadyCheckTime).TotalSeconds;
                                    if (TimeWaiting >= Options.Wait)
                                    {
                                        Log.Warning("Test {0} has been waiting to run resource-free for {1:00} seconds. Removing from wait list", Node, TimeWaiting);
                                        PendingTests[i]       = null;
                                        NodeInfo.PreStartTime = NodeInfo.PostStartTime = NodeInfo.EndTime = DateTime.Now;
                                        NodeInfo.Result       = TestExecutionInfo.ExecutionResult.TimedOut;
                                        CompletedTests.Add(NodeInfo);
                                    }
                                }
                            }
                        }

                        // remove anything we nulled
                        PendingTests = PendingTests.Where(T => T != null).ToList();

                        if (TestToStart != null)
                        {
                            Log.Info("Test {0} is ready to run", TestToStart);

                            PendingTests.Remove(TestToStart);
                            StartingTests.Add(TestToStart);

                            // StartTest is the only thing we do on a thread because it's likely to be the most time consuming
                            // as build are copied so will get the most benefit from happening in parallel
                            Thread StartThread = new Thread(() =>
                            {
                                Thread.CurrentThread.IsBackground = true;

                                // start the test, this also fills in the pre/post start times
                                bool Started = StartTest(TestToStart, CurrentTestPass, Options.TestLoops);

                                lock (Globals.MainLock)
                                {
                                    if (Started == false)
                                    {
                                        TestToStart.PostStartTime = TestToStart.EndTime = DateTime.Now;
                                        CompletedTests.Add(TestToStart);
                                        Log.Error("Test {0} failed to start", TestToStart);
                                    }
                                    else
                                    {
                                        RunningTests.Add(TestToStart);
                                    }

                                    StartingTests.Remove(TestToStart);
                                    StartingTestThreads.Remove(Thread.CurrentThread);
                                }
                            });

                            if (StartingTests.Count > MaxStartingTasks)
                            {
                                MaxStartingTasks = StartingTests.Count;
                            }

                            // track the thread and start it
                            StartingTestThreads.Add(StartThread);
                            StartThread.Start();
                        }
                        else
                        {
                            // don't check for a while as querying kits for availability can be expensive
                            LastReadyCheck = DateTime.Now;
                        }
                    }

                    // Tick all running tests
                    foreach (TestExecutionInfo TestInfo in RunningTests)
                    {
                        TestResult Result = TickTest(TestInfo);

                        // invalid = no result yet
                        if (Result == TestResult.Invalid)
                        {
                            TimeSpan RunningTime = DateTime.Now - TestInfo.PostStartTime;

                            if ((SecondsRunning % 60) == 0)
                            {
                                Log.Verbose("Test {0} is still running. {1:00} seconds elapsed, will timeout in {2:00} seconds",
                                            TestInfo,
                                            RunningTime.TotalSeconds,
                                            TestInfo.TestNode.MaxDuration - RunningTime.TotalSeconds);

                                LastUpdateMsg = DateTime.Now;
                            }
                        }
                        else
                        {
                            TestInfo.EndTime = DateTime.Now;
                            TestInfo.Result  = Result == TestResult.Passed ? TestExecutionInfo.ExecutionResult.Passed : TestExecutionInfo.ExecutionResult.Failed;
                            CompletedTests.Add(TestInfo);
                        }
                    }

                    // remove any tests that were completed
                    RunningTests = RunningTests.Where(R => CompletedTests.Contains(R) == false).ToList();

                    if ((DateTime.Now - LastStatusUpdateTime).TotalSeconds >= StatusUpdatePeriod)
                    {
                        LastStatusUpdateTime = DateTime.Now;
                        Log.Info("Status: Completed:{0}, Running:{1}, Starting: {2}, Waiting:{3}",
                                 CompletedTests.Count(), RunningTests.Count(), StartingTests.Count(), PendingTests.Count());
                    }

                    if (InProgressCount > MaxParallelTasks)
                    {
                        // BUG FIX: previously this recorded RunningTests.Count(), which did not
                        // match the InProgressCount value that triggered the update, so the
                        // high-water mark under-counted starting tests.
                        MaxParallelTasks = InProgressCount;
                    }

                    // Release our global lock before we loop
                    Monitor.Exit(Globals.MainLock);

                    // sleep a while before we tick our running tasks again
                    Thread.Sleep(500);
                }

                if (IsCancelled)
                {
                    DateTime StartTime = DateTime.Now;
                    Log.Info("Cleaning up pending and running tests.");
                    // give the post-abort notification a short grace period to arrive
                    while (HaveReceivedPostAbort == false)
                    {
                        Thread.Sleep(500);
                        double Elapsed = (DateTime.Now - StartTime).TotalSeconds;

                        if (Elapsed >= 5)
                        {
                            Log.Error("Giving up waiting for tests after {0:00} seconds", Elapsed);
                            break;
                        }
                    }

                    // tick anything running, this will also check IsCancelled and stop them
                    // forcibly kill anything waiting
                    if (StartingTestThreads.Count > 0)
                    {
                        foreach (Thread T in StartingTestThreads)
                        {
                            Log.Info("Aborting startup thread");
                            T.Abort();
                        }
                        Thread.Sleep(1000);
                    }

                    foreach (TestExecutionInfo TestInfo in StartingTests)
                    {
                        Log.Info("Forcing pending test {0} to run CleanupTest", TestInfo.TestNode.Name);
                        TestInfo.TestNode.CleanupTest();
                        CompletedTests.Add(TestInfo);
                    }

                    foreach (TestExecutionInfo TestInfo in RunningTests)
                    {
                        Log.Info("Ticking test {0} to cancel", TestInfo.TestNode.Name);
                        TestResult Res = TickTest(TestInfo);
                        CompletedTests.Add(TestInfo);

                        if (Res != TestResult.Failed)
                        {
                            // BUG FIX: "returnd" typo corrected in the warning message.
                            Log.Warning("Ticking of cancelled test {0} returned {1}", TestInfo.TestNode.Name, Res);
                        }
                    }
                }
                else
                {
                    TimeSpan PassDuration = DateTime.Now - StartPassTime;

                    int FailedCount = 0;
                    int TestCount   = CompletedTests.Count;

                    CompletedTests.ForEach(T =>
                    {
                        // NOTE(review): FirstReadyCheckTime - PreStartTime is usually negative
                        // (the ready check precedes the start) — confirm the intended operands.
                        TimeSpan TimeWaiting  = T.FirstReadyCheckTime - T.PreStartTime;
                        TimeSpan SetupTime    = T.PostStartTime - T.PreStartTime;
                        TimeSpan TestDuration = T.EndTime - T.PostStartTime;

                        // status msg, kept uniform to avoid spam on notifiers (ie. don't include timestamps, etc)
                        string Msg = string.Format("Test {0} {1}", T.TestNode, T.Result);

                        if (T.Result != TestExecutionInfo.ExecutionResult.Passed)
                        {
                            FailedCount++;
                        }

                        Log.Info(Msg);

                        // log test timing to info
                        Log.Info(string.Format("Test Time: {0:mm\\:ss} (Waited:{1:mm\\:ss}, Setup:{2:mm\\:ss})", TestDuration, TimeWaiting, SetupTime));
                    });

                    if (Options.Parallel > 1)
                    {
                        Log.Info("MaxParallelTasks: {0}", MaxParallelTasks);
                        Log.Info("MaxStartingTasks: {0}", MaxStartingTasks);
                    }

                    // report all tests
                    ReportMasterSummary(CurrentTestPass + 1, Options.TestLoops, PassDuration, CompletedTests);

                    if (FailedCount > 0)
                    {
                        FailedPassList.Add(CurrentTestPass);

                        if (Options.StopOnError)
                        {
                            break;
                        }
                    }
                }

                // show details for multi passes
                // NOTE(review): this logs "Completed all passes" after EVERY pass, not just
                // the final one — confirm whether it should be outside the pass loop.
                if (Options.TestLoops > 1)
                {
                    Log.Info("Completed all passes. {0} of {1} completed without error", CurrentTestPass + 1 - FailedPassList.Count(), Options.TestLoops);

                    if (FailedPassList.Count > 0)
                    {
                        string FailedList = string.Join(",", FailedPassList);
                        Log.Warning("Failed passes: " + FailedList);
                    }
                }
            }

            IsRunning = false;

            Globals.AbortHandlers.Remove(CancelHandler);
            Globals.PostAbortHandlers.Remove(PostCancelHandler);

            return(FailedPassList.Count == 0 && !IsCancelled);
        }
 // Finds the wrapper model node whose payload is the given test node.
 // Single() enforces that exactly one such wrapper exists.
 private IModelNode GetWrapperNode([NotNull] ITestNode testNode)
 {
     var wrapper = _modelService.LatestModel.Nodes.Single(n => n.Payload.Equals(testNode));
     return wrapper;
 }
 // The underlying model node is simply the wrapper carrying this test node.
 public IModelNode GetUnderlyingNode(ITestNode node)
 {
     return GetWrapperNode(node);
 }
 // Records "node inherits from baseNode" as an Inheritance relationship in the model.
 private void AddInheritance(ITestNode node, ITestNode baseNode)
 {
     AddRelationship(node, baseNode, ModelRelationshipStereotypes.Inheritance);
 }
// Example #26
 // Intentionally a no-op: this implementation performs no work for the node.
 // NOTE(review): confirm callers rely on Run being side-effect free.
 public void Run(ITestNode node)
 {
 }
 // Records "node implements baseNode" as an Implementation relationship in the model.
 private void AddImplements(ITestNode node, ITestNode baseNode)
 {
     AddRelationship(node, baseNode, ModelRelationshipStereotypes.Implementation);
 }