public void StartTests(string trackId, TestCoreMessages.StationType stationType)
        {
            //POST this trackId if posting is enabled.
            if (isPostEnable)
            {
                MQS.Post();
            }

            TestCoreRunner tcr = new TestCoreRunner(this, trackId, stationType);

            tcr.RunTestsToStation();
        }
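
A minimal call-site sketch for StartTests, assuming a handler that receives a scanned track ID; the handler name and its wiring below are illustrative, not taken from the original source.

        // Hypothetical caller: start the station's test sequence for a scanned unit.
        // OnTrackIdScanned and the stationType parameter wiring are assumptions for illustration only.
        public void OnTrackIdScanned(string scannedTrackId, TestCoreMessages.StationType stationType)
        {
            StartTests(scannedTrackId, stationType);
        }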
Example No. 2
        // Blocks until the jig is closed: notifies the UI once, then polls every second
        // (printing progress dots) and reports when the jig is finally closed.
        private static void isClosedJig(TestCoreRunner runner)
        {
            if (!runner.tcc.Jig.IsJigClosed())
            {
                runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, TestCoreMessages.ParseMessages(TestCoreMessages.JIG_NOT_CLOSED));

                while (!runner.tcc.Jig.IsJigClosed())
                {
                    Thread.Sleep(1000);
                    runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, ". ");
                }

                runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.WARNING, TestCoreMessages.ParseMessages(TestCoreMessages.JIG_CLOSED));
            }
        }
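
For reference, a minimal sketch of the jig contract that isClosedJig polls; only IsJigClosed() appears in the snippet above, so the interface name and everything else here is an assumption.

    // Assumed shape of the jig abstraction; only IsJigClosed() is taken from the code above.
    public interface IJig
    {
        // True when the fixture is closed and it is safe to run tests.
        bool IsJigClosed();
    }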
Example No. 3
        // Runs every test case through its Prepare, Execute and EvaluateResults phases,
        // updating the andon state and UI progress, then saves the PASS/FAIL log.
        private static void run(List<TestCaseBase> testCases, TestCoreRunner runner)
        {
            bool hasFailTests = false;

            runner.tcc.Andon.SetState(Domain.Andon.State.OFF);

            try
            {
                //AdbManager.installSambaAppIfNeeded(runner.PrintModInfo);
                //runner.tcc.Mod.GetModInfo(runner.PrintModInfo);

                //Check whether the ModTrackId matches the scanned trackId.
                //NOTE: the mismatch handling below is currently commented out, so no action is taken.
                if (!runner.modTrackId.Equals(runner.trackId))
                {
                    //runner.tcc.runTest = false;
                    //throw new Exception("ModTrackId does not match the scanned trackId.");
                }
            }
            catch (Exception ex)
            {
                runner.tcc.Andon.SetState(Domain.Andon.State.FAIL);
                runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, ex.Message);
                hasFailTests = true;
                runner.tcc.SetLogNameTo(hasFailTests ? "FAIL" : "PASS");

                TestCaseBase.SaveTestResultListToFile(runner.tcc.newLogFileLocation);

                return;
            }

            TestCaseBase.TestResultList.Clear();

            int    totalTestCases = testCases.Count * 3; // *3 because each test has Prepare, Execute and EvaluateResults phases
            double percentByTest  = totalTestCases > 0 ? 100.0 / totalTestCases : 0; // 100.0 forces floating-point division
            double currentPercent = 0;

            foreach (TestCaseBase item in testCases)
            {
                isClosedJig(runner);

                if (!runner.tcc.runTest)
                {
                    runner.tcc.Andon.SetState(Domain.Andon.State.FAIL);
                    hasFailTests = true;
                    runner.tcc.SetLogNameTo("CANCELED");
                    TestCaseBase.SaveTestResultListToFile(runner.tcc.newLogFileLocation);
                    return;
                }

                runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.WARNING, "==================================================\n");

                int result = -1;

                //Preparing...
                currentPercent = currentPercent + percentByTest;
                runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.UPDATE_TEST_PERCENTUAL, string.Format("{0:0}", currentPercent));
                if (!TryExecute(item.Prepare, item.Timeout, out result))
                {
                    runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, TestCoreMessages.ParseMessages(TestCoreMessages.REACHED_TIMEOUT) + " => " + item.Timeout + " ms.");
                }
                if (result != TestCoreMessages.SUCCESS)
                {
                    hasFailTests = true;
                    continue;
                }

                //Executing...
                currentPercent = currentPercent + percentByTest;
                runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.UPDATE_TEST_PERCENTUAL, string.Format("{0:0}", currentPercent));
                if (!TryExecute(item.Execute, item.Timeout, out result))
                {
                    runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, TestCoreMessages.ParseMessages(TestCoreMessages.REACHED_TIMEOUT) + " => " + item.Timeout + " ms.");
                }
                if (result != TestCoreMessages.SUCCESS)
                {
                    hasFailTests = true;
                    continue;
                }

                //Evaluating...
                currentPercent = currentPercent + percentByTest;
                runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.UPDATE_TEST_PERCENTUAL, string.Format("{0:0}", currentPercent));
                if (!TryExecute(item.EvaluateResults, item.Timeout, out result))
                {
                    runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, TestCoreMessages.ParseMessages(TestCoreMessages.REACHED_TIMEOUT) + " => " + item.Timeout + " ms.");
                }
                if (result != TestCoreMessages.SUCCESS)
                {
                    hasFailTests = true;
                    continue;
                }

                //Set andon state
                if (item.ResulTest == TestCaseBase.TestEvaluateResult.PASS)
                {
                    runner.tcc.Andon.SetState(Domain.Andon.State.PASS);
                }
                else
                {
                    runner.tcc.Andon.SetState(Domain.Andon.State.FAIL);
                    hasFailTests = true;
                }
            }

            //Change log name and save it.
            runner.tcc.SetLogNameTo(hasFailTests ? "FAIL" : "PASS");
            TestCaseBase.SaveTestResultListToFile(runner.tcc.newLogFileLocation);

            //Post the overall result if LOG_RESULT_ENABLE is set in the configuration.
            if (Boolean.Parse(runner.tcc.GetValueConfiguration("SETTINGS", "LOG_RESULT_ENABLE").ToLower()))
            {
                runner.tcc.MQS.LogResult(hasFailTests ? "FAIL" : "PASS");
            }
        }
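
The run method delegates each phase to a TryExecute helper that is not included in this snippet. Below is a minimal sketch of a compatible helper, assuming the signature bool TryExecute(Func<int> phase, int timeoutMs, out int result) implied by the calls above (requires using System and System.Threading.Tasks); the original implementation may differ.

        // Sketch of a timeout wrapper matching the calls in run(); details here are assumptions.
        private static bool TryExecute(Func<int> phase, int timeoutMs, out int result)
        {
            Task<int> task = Task.Run(phase);

            if (task.Wait(timeoutMs))
            {
                result = task.Result; // phase completed within the timeout
                return true;
            }

            result = -1;              // caller treats anything != TestCoreMessages.SUCCESS as a failure
            return false;             // caller reports REACHED_TIMEOUT
        }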