Example #1
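        /// <summary>
        /// Signals the harness that the device is done and terminates any running
        /// sample emulator processes left over from previous runs.
        /// </summary>
        /// <returns>True if at least one emulator process had to be killed; otherwise false.</returns>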
        internal bool KillEmulator()
        {
            bool emCrash = false;

            Harness.SetDeviceDoneEvent();
            foreach (Process p in Process.GetProcessesByName("Microsoft.SPOT.Emulator.Sample.SampleEmulator"))
            {
                p.Kill();
                emCrash = true;
            }

            return emCrash;
        }
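            /// <summary>
            /// Runs every test solution through the Harness, making up to three attempts per test,
            /// and records the outcome of each run in the XML log.
            /// </summary>
            /// <param name="args">Command line arguments for the run.</param>
            /// <returns>The HarnessExecutionResult of the last test executed.</returns>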
            internal HarnessExecutionResult Run(string[] args)
            {
                TimedTest test;
                HarnessExecutionResult hResult = HarnessExecutionResult.Unavailable;
                bool runTestsIndividually = false;
                m_log = new XmlLog();

                // Prelim: Set the env, parse the arguments, set the result paths and set the test list.
                SetEnvironment();
                if (args != null && args.Length > 0)
                {
                    if (!ParseArguments(args, ref runTestsIndividually))
                    {
                        return HarnessExecutionResult.InvalidArguments;
                    }
                }
                SetResultPaths();
                BaseTest[] testList = BuildTestList(runTestsIndividually);

                // Create a new harness object and set the properties.
                Harness harness = new Harness(IsDevEnvironment);
                if (m_transport != null)
                {
                    harness.Transport = m_transport;
                }
                else
                {
                    // harness constructor assigns default transport
                    m_transport = harness.Transport;
                }
                if (m_device != null)
                {
                    harness.Device = m_device;
                }
                else
                {
                    // harness constructor assigns default device
                    m_device = harness.Device;
                }

                // Execute each of the solution files using Harness.
                for (int i = 0; i < testList.Length; i++)
                {
                    if (testList[i] == null)
                    {
                        continue;
                    }

                    if (this.Transport.ToLower().Contains("tcpip") &&
                        (testList[i].Name.ToLower().Contains("netinfotests.sln")))
                    {
                        continue;
                    }

                    
                    hResult = HarnessExecutionResult.Unavailable;
                    int attempts = 0;
                    while ((hResult != HarnessExecutionResult.Success &&
                        hResult != HarnessExecutionResult.Abort) && attempts++ < 3)
                    {
                        test = new TimedTest(testList[i], harness, m_log);

                        // Kill any emulators running from previous runs.
                        TerminateRunningEmulators(test, testList[i]);

                        try
                        {
                            hResult = test.Execute();

                            if (hResult == HarnessExecutionResult.Unavailable)
                            {
                                Utils.WriteToEventLog("Harness returned an Unavailable result after running the test: " +
                                    testList[i].Name + ". Number of attempts so far = " + attempts);
                                string deviceStatus = DeviceStatus(hResult);
                                Utils.WriteToEventLog("Device status after unavailable from harness: " + deviceStatus);
                            }

                            // Test did not execute because the device was dead.
                            // If so, reset power to the device and re-run test.
                            if (hResult == HarnessExecutionResult.NoConnection)
                            {
                                Utils.WriteToEventLog("Harness returned a NoConnection result after running the test: " +
                                    testList[i].Name + ". Number of attempts so far = " + attempts);
                                string deviceStatus = DeviceStatus(hResult);
                                Utils.WriteToEventLog("Device status after noconnection from harness: " + deviceStatus);                                
                            }

                            // Test timed out: record the failure, send a notification mail, and stop retrying this test.
                            if (hResult == HarnessExecutionResult.TimeOut)
                            {
                                Utils.WriteToEventLog("Test: " + test.Test.Name + " failed.");
                                harness.MFTestResult = Harness.Result.Fail;
                                GetTestResultDetails(harness, testList[i]);
                                Console.WriteLine("Test Result: " + harness.MFTestResult);
                                test.SendMail();
                                break;
                            }

                            // Test did not succeed within three attempts; record it as a failure.
                            if (hResult != HarnessExecutionResult.Success && attempts >= 3)
                            {
                                Utils.WriteToEventLog("Test: " + test.Test.Name + " failed.");
                                harness.MFTestResult = Harness.Result.Fail;
                                GetTestResultDetails(harness, testList[i]);
                                Console.WriteLine("Test Result: " + harness.MFTestResult);
                            }

                            // Test succeeded within three attempts, or the harness requested an abort.
                            if ((hResult == HarnessExecutionResult.Success && attempts < 4) ||
                                (hResult == HarnessExecutionResult.Abort))
                            {
                                GetTestResultDetails(harness, testList[i]);
                                if (!string.IsNullOrEmpty(m_device))
                                {
                                    string deviceStatus = DeviceStatus(hResult);                                    
                                    Utils.WriteToEventLog("Device status after running " + testList[i].Name 
                                        + ": " + deviceStatus);
                                    if (!IsProfilerRun)
                                    {
                                        m_log.AddDeviceStatusToLog("Device ping result after running "
                                            + testList[i].Name + ":  " + deviceStatus);
                                    }
                                    if (string.Equals(deviceStatus.ToLower(), "noconnection"))
                                    {
                                        throw new ApplicationException("Device did not reboot correctly after " +
                                            "running the test: " + testList[i].Name);
                                    }
                                }

                                if (!IsProfilerRun)
                                {
                                    Console.WriteLine("Test Result: " + harness.MFTestResult);
                                }
                            }
                        }
                        catch (Exception ex)
                        {
                            if (ex is FileNotFoundException)
                            {
                                if (!IsProfilerRun)
                                {
                                    Console.WriteLine(ex.ToString());
                                    try
                                    {
                                        m_log.AddCommentToLog(ex.ToString());
                                    }
                                    catch
                                    {
                                    }

                                    Utils.WriteToEventLog(
                                        string.Format("Exception in TestSystem.cs: {0}", ex.ToString()));
                                    hResult = HarnessExecutionResult.Abort;
                                }
                            }
                        }

                        // Wait for a few seconds before starting the next test when running on devices.
                        if (!string.Equals(m_transport.ToLower(), "emulator"))
                        {
                            System.Threading.Thread.Sleep(5000);
                        }
                    }
                }

                // Update test results and logs location.
                m_didAllTestsPass = (m_failCount == 0) && (m_passCount > 0);
                UpdateLogFolder();
                return hResult;
            }
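For context, a minimal sketch of how Run might be driven from a console entry point. The class name TestSystem is inferred from the event-log message above ("Exception in TestSystem.cs"); the Main wrapper and the exit-code mapping are assumptions for illustration only.
            // Hypothetical entry point; only Run() and HarnessExecutionResult come from the code above.
            static int Main(string[] args)
            {
                TestSystem testSystem = new TestSystem();              // assumed class name
                HarnessExecutionResult result = testSystem.Run(args);  // Run is internal, so this caller
                                                                       // would live in the same assembly.
                return result == HarnessExecutionResult.Success ? 0 : 1;
            }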
            private void UpdateTestDetails(Harness harness, BaseTest currentTest, 
                Harness.Result mfResult, int totalTestMethods, XmlDocument doc)
            {
                string[] separator = { "::" };
                try
                {
                    currentTest.EndTime = harness.EndTime;
                    currentTest.LogFile = harness.LogFile;
                    currentTest.Result = mfResult.ToString();
                    currentTest.StartTime = harness.StartTime;

                    if (m_isProfilerRun)
                    {
                        ArrayList methodList = Utils.ReadProfilerLogFile(currentTest.LogFile, currentTest.ExeLocation);

                        for (int i = 0; i < methodList.Count; i++)
                        {
                            string[] vals = methodList[i].ToString().Split('\t');

                            int exclTime = 0;
                            if (!string.IsNullOrEmpty(vals[3]))
                            {
                                exclTime = Convert.ToInt32(vals[3].Trim());
                            }

                            int inclTime = 0;
                            if (!string.IsNullOrEmpty(vals[4]))
                            {
                                inclTime = Convert.ToInt32(vals[4].Trim());
                            }

                            ProfilerTestMethod ptm = new ProfilerTestMethod();
                            ptm.TestMethod = vals[2].Split(separator, StringSplitOptions.None)[1].Trim();
                            ptm.InclusiveTime = inclTime;
                            ptm.ExclusiveTime = exclTime;

                            currentTest.TestMethods.Add(ptm);
                        }
                    }
                    else
                    {
                        // Update specific results count.
                        switch (mfResult)
                        {
                            case Harness.Result.Pass:
                                m_passCount++;
                                break;

                            case Harness.Result.Fail:
                                m_failCount++;
                                break;

                            case Harness.Result.Skip:
                                m_skipCount++;
                                break;

                            case Harness.Result.KnownFailure:
                                m_knownFailCount++;
                                break;
                        }

                        int pc = 0, fc = 0, sc = 0, kc = 0;
                        try
                        {
                            // Get total passcount, failcount, skipcount and known failure count for the test
                            XmlNodeList passNodes = doc.GetElementsByTagName("PassCount");
                            foreach (XmlNode passNode in passNodes)
                            {
                                pc += Convert.ToInt32(passNode.ChildNodes[0].InnerText);
                            }

                            XmlNodeList failNodes = doc.GetElementsByTagName("FailCount");
                            foreach (XmlNode failNode in failNodes)
                            {
                                fc += Convert.ToInt32(failNode.ChildNodes[0].InnerText);
                            }

                            XmlNodeList skipNodes = doc.GetElementsByTagName("SkipCount");
                            foreach (XmlNode skipNode in skipNodes)
                            {
                                sc += Convert.ToInt32(skipNode.ChildNodes[0].InnerText);
                            }

                            XmlNodeList knownFailNodes = doc.GetElementsByTagName("KnownFailureCount");
                            foreach (XmlNode knownFailNode in knownFailNodes)
                            {
                                kc += Convert.ToInt32(knownFailNode.ChildNodes[0].InnerText);
                            }
                        }
                        catch
                        {
                        }

                        currentTest.TestMethodFailCount = fc;
                        currentTest.TestMethodKnownFailureCount = kc;
                        currentTest.TestMethodPassCount = pc;
                        currentTest.TestMethodSkipCount = sc;
                        currentTest.TotalTestCases = totalTestMethods;
                    }

                    m_mfTestList.Add(currentTest);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("WARNING: " + ex.ToString());
                }
            }
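The profiler branch above assumes each record returned by Utils.ReadProfilerLogFile is tab-separated, with the qualified method name in field 2 and the exclusive/inclusive times in fields 3 and 4. A minimal illustration of that parsing, using an invented sample record:
            // Invented sample record; only the field layout is inferred from the parsing code above.
            string sample = "1\tMyTests.exe\tMyTests.TestClass::TestMethod1\t120\t340";
            string[] vals = sample.Split('\t');
            string methodName = vals[2].Split(new[] { "::" }, StringSplitOptions.None)[1].Trim(); // "TestMethod1"
            int exclusiveTime = Convert.ToInt32(vals[3].Trim());                                  // 120
            int inclusiveTime = Convert.ToInt32(vals[4].Trim());                                  // 340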
            private void GetTestResultDetails(Harness harness, BaseTest currentTest)
            {
                // Get the number of test methods in the log file for the test:
                // open the xml file and count the number of <TestMethod> sections.
                int totalTestMethods = 0;
                XmlDocument doc = new XmlDocument();
                Harness.Result mfResult = Harness.Result.NotKnown;

                if (!m_isProfilerRun)
                {
                    try
                    {
                        doc.Load(m_logStore + harness.LogFile);
                        XmlNodeList testLogNodes = doc.SelectNodes("/SPOT_Platform_Test/TestLog");
                        foreach (XmlNode tlNode in testLogNodes)
                        {
                            XmlNodeList testMethodNodes = tlNode.SelectNodes("TestMethod");
                            totalTestMethods += testMethodNodes.Count;
                        }
                    }
                    catch (XmlException)
                    {
                        harness.TestResults = Harness.Result.Fail;
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine("WARNING: " + ex.ToString());
                    }

                    try
                    {
                        mfResult = harness.MFTestResult;
                    }
                    catch
                    {
                        mfResult = Harness.Result.Fail;
                    }
                }
             
                UpdateTestDetails(harness, currentTest, mfResult, totalTestMethods, doc);

                // Add to totalTestCases count.
                // This will give you the number of test cases executed during the entire run in all the tests.
                m_totalTestCases += totalTestMethods;
            }
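GetTestResultDetails and UpdateTestDetails expect the test log to contain a /SPOT_Platform_Test/TestLog/TestMethod hierarchy plus PassCount, FailCount, SkipCount and KnownFailureCount elements. A minimal sketch of that shape, using an invented log fragment (only the element names are taken from the queries above; where exactly the count elements sit in the real log is an assumption):
            // Invented log fragment illustrating the element names queried above.
            XmlDocument sampleDoc = new XmlDocument();
            sampleDoc.LoadXml(
                "<SPOT_Platform_Test>" +
                "  <TestLog>" +
                "    <TestMethod><PassCount>3</PassCount><FailCount>1</FailCount>" +
                "    <SkipCount>0</SkipCount><KnownFailureCount>0</KnownFailureCount></TestMethod>" +
                "  </TestLog>" +
                "</SPOT_Platform_Test>");

            int totalTestMethods = sampleDoc.SelectNodes("/SPOT_Platform_Test/TestLog/TestMethod").Count; // 1
            int passCount = Convert.ToInt32(
                sampleDoc.GetElementsByTagName("PassCount")[0].ChildNodes[0].InnerText);                  // 3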
 /// <summary>
 /// Overloaded constructor.
 /// </summary>
 /// <param name="test">The test to execute (represents the .sln file).</param>
 /// <param name="harness">The Harness instance.</param>
 /// <param name="log">The XmlLog instance used for recording results.</param>
 internal TimedTest(BaseTest test, Harness harness, XmlLog log)
 {
     this.m_test = test;
     this.m_harness = harness;
     this.m_log = log;
 }
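The Run method above shows how this constructor is used in practice: one TimedTest is created per solution file and then executed. A short recap (testList[i], harness and m_log are the variables from Run):
 // Usage as seen in Run() above: wrap a test, the shared harness and the log, then execute.
 TimedTest test = new TimedTest(testList[i], harness, m_log);
 HarnessExecutionResult hResult = test.Execute();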