Example #1
            private void AddTest(ref BaseTest[] list, ref int index, string slnPath)
            {
                if (m_isProfilerRun)
                {
                    list[index] = new ProfilerTest();
                }
                else
                {
                    list[index] = new MicroFrameworkTest();
                }

                list[index].Name = slnPath;
                list[index++].Location = slnPath;
            }
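The helper above fills successive slots of a pre-allocated array and advances the caller's index. A minimal sketch of a caller, assuming it lives on the same class so that m_runCount, AddTest and the test types are in scope; the method name and the solution paths are purely illustrative:

            private BaseTest[] BuildListFromPaths(string[] slnPaths)
            {
                // One slot per solution path per run; AddTest advances the index as it fills the array.
                BaseTest[] tests = new BaseTest[slnPaths.Length * m_runCount];
                int index = 0;

                for (int rc = 0; rc < m_runCount; rc++)
                {
                    foreach (string slnPath in slnPaths)
                    {
                        AddTest(ref tests, ref index, slnPath);
                    }
                }

                return tests;
            }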
Example #2
            private BaseTest[] BuildSolutionFileList(string dirProjPath)
            {
                // Scan build.dirproj to generate the test list.                                               
                //List<string> staticList = new List<string>();
                ArrayList test_list = new ArrayList();
                if (m_isStaticList)
                {
                    foreach (string file in File.ReadAllLines(m_staticList))
                    {
                        //staticList.Add(file.ToLower());
                        test_list.Add(file.ToLower());
                    }
                }
                else
                {
                    test_list = BuildTestList(dirProjPath, new ArrayList());

                    // Add the DPWS fixture tests which are built by the build system to the test list.
                    string dpwsFixtureTestPath = "";

                    string flav = Environment.GetEnvironmentVariable("FLAVOR");
                    
                    if (string.IsNullOrEmpty(flav))
                    {
                        flav = "Release";
                    }

                    dpwsFixtureTestPath = string.Format(@"{0}\BuildOutput\public\{1}\test\server\dpws",
                        Environment.GetEnvironmentVariable("SPOCLIENT"),
                        flav);

                    if (!Directory.Exists(dpwsFixtureTestPath))
                    {
                        if (flav.ToLower() == "debug") flav = "Release";
                        else flav = "Debug";

                        dpwsFixtureTestPath = string.Format(@"{0}\BuildOutput\public\{1}\test\server\dpws",
                            Environment.GetEnvironmentVariable("SPOCLIENT"),
                            flav);
                    }

                    if (Directory.Exists(dpwsFixtureTestPath))
                    {
                        FileInfo[] dpwsSlnFiles = new DirectoryInfo(dpwsFixtureTestPath).GetFiles("*.sln");
                        foreach (FileInfo dpwsSlnFile in dpwsSlnFiles)
                        {
                            test_list.Add(dpwsSlnFile.FullName);
                        }
                    }
                }

                // Copy the contents of the local ArrayList into the test array, once per run.
                BaseTest[] list = new BaseTest[test_list.Count * m_runCount];
                for (int rc = 0; rc < m_runCount; rc++)
                {
                    for (int i = 0; i < test_list.Count; i++)
                    {
                        // Index the output array by run and test so that test_list is never
                        // indexed past its end when m_runCount is greater than one.
                        int index = rc * test_list.Count + i;

                        if (m_isProfilerRun)
                        {
                            list[index] = new ProfilerTest();
                        }
                        else
                        {
                            list[index] = new MicroFrameworkTest();
                        }
                        list[index].Name = test_list[i].ToString();
                        list[index].Location = test_list[i].ToString();
                    }
                }

                return list;
            }
Example #3
            private BaseTest[] BuildTestList(bool runTestsIndividually)
            {
                DirectoryInfo[] dirs;
                BaseTest[] list;

                if (runTestsIndividually)
                {
                    list = new BaseTest[m_runCount];

                    for (int i = 0; i < m_runCount; i++)
                    {
                        if (m_isProfilerRun)
                        {
                            list[i] = new ProfilerTest();
                        }
                        else
                        {
                            list[i] = new MicroFrameworkTest();
                        }
                        list[i].Name = m_test;
                        list[i].Location = m_test;
                    }
                }
                else
                {
                    // The machine is a test box with the sdk installation and not a dev box.
                    if (!IsDevEnvironment)
                    {
                        if (m_isProfilerRun)
                        {
                            dirs = new DirectoryInfo(string.Format("{0}ManagedProfilerTests",
                                InstallRoot)).GetDirectories();
                        }
                        else
                        {
                            dirs = new DirectoryInfo(string.Format("{0}TestCases",
                                InstallRoot)).GetDirectories();
                        }

                        list = BuildSolutionFileList(dirs);
                    }
                    else
                    {
                        // The machine is a dev box.
                        if (m_isProfilerRun)
                        {
                            dirs = new DirectoryInfo(string.Format(@"{0}Tests\Performance\ProfilerTests",
                                InstallRoot)).GetDirectories();
                            list = BuildSolutionFileList(dirs);
                        }
                        else
                        {
                            list = BuildSolutionFileList(InstallRoot + "build.dirproj");
                        }
                    }
                }

                return list;
            }
Example #4
            private BaseTest[] BuildSolutionFileList(DirectoryInfo[] dirs)
            {
                int index = 0;
                BaseTest[] list = new BaseTest[dirs.Length * m_runCount];

                for (int i = 0; i < m_runCount; i++)
                {
                    foreach (DirectoryInfo dir in dirs)
                    {
                        // The installed location will have only one sln file per test.
                        FileInfo[] slnFiles = dir.GetFiles("*.sln");
                        if (slnFiles.Length > 0)
                        {
                            string slnPath = dir.Name + "\\" + slnFiles[0].Name;
                            if (!string.IsNullOrEmpty(slnPath))
                            {
                                // Add only the tests specified in the testlist if one is supplied.
                                if (!m_isStaticList)
                                {
                                    AddTest(ref list, ref index, slnPath);
                                }
                                else
                                {
                                    foreach (string file in File.ReadAllLines(m_staticList))
                                    {
                                        if (string.Equals(slnFiles[0].Name, Path.GetFileName(file),
                                            StringComparison.InvariantCultureIgnoreCase))
                                        {
                                            AddTest(ref list, ref index, slnPath);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

                return list;
            }
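Note that when a static test list is supplied, the loop above re-reads m_staticList from disk for every solution file it inspects. A hedged alternative sketch, reusing the field names from the example, that loads the list once into a case-insensitive set before the directory loop; this illustrates the idea and is not the original implementation:

            // Hypothetical variant: read the static list a single time and match by file name.
            HashSet<string> staticTests = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
            if (m_isStaticList)
            {
                foreach (string file in File.ReadAllLines(m_staticList))
                {
                    staticTests.Add(Path.GetFileName(file));
                }
            }

            // Inside the per-directory loop the check then reduces to:
            //     if (!m_isStaticList || staticTests.Contains(slnFiles[0].Name))
            //     {
            //         AddTest(ref list, ref index, slnPath);
            //     }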
Example #5
            private void GetTestResultDetails(Harness harness, BaseTest currentTest)
            {
                // Get the number of test methods in the log file for the test.
                // Open the XML file and count the <TestMethod> sections.
                int totalTestMethods = 0;
                XmlDocument doc = new XmlDocument();
                Harness.Result mfResult = Harness.Result.NotKnown;

                if (!m_isProfilerRun)
                {
                    try
                    {
                        doc.Load(m_logStore + harness.LogFile);
                        XmlNodeList testLogNodes = doc.SelectNodes("/SPOT_Platform_Test/TestLog");
                        foreach (XmlNode tlNode in testLogNodes)
                        {
                            XmlNodeList testMethodNodes = tlNode.SelectNodes("TestMethod");
                            totalTestMethods += testMethodNodes.Count;
                        }
                    }
                    catch (XmlException)
                    {
                        harness.TestResults = Harness.Result.Fail;
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine("WARNING: " + ex.ToString());
                    }

                    try
                    {
                        mfResult = harness.MFTestResult;
                    }
                    catch
                    {
                        mfResult = Harness.Result.Fail;
                    }
                }
             
                UpdateTestDetails(harness, currentTest, mfResult, totalTestMethods, doc);

                // Add to totalTestCases count.
                // This will give you the number of test cases executed during the entire run in all the tests.
                m_totalTestCases += totalTestMethods;
            }
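The element names used in Examples #5 and #6 (SPOT_Platform_Test, TestLog, TestMethod, PassCount, FailCount, SkipCount, KnownFailureCount) imply a log layout roughly like the one below. A self-contained sketch, with the document shape inferred from the XPath queries rather than taken from an actual log file, showing how the TestMethod count is derived:

    // Sketch only: the log structure below is inferred from the SelectNodes and
    // GetElementsByTagName calls in Examples #5 and #6; real logs carry more data.
    using System;
    using System.Xml;

    class LogShapeSketch
    {
        static void Main()
        {
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(
                "<SPOT_Platform_Test>" +
                "  <TestLog>" +
                "    <TestMethod/>" +
                "    <TestMethod/>" +
                "    <PassCount>2</PassCount>" +
                "    <FailCount>0</FailCount>" +
                "    <SkipCount>0</SkipCount>" +
                "    <KnownFailureCount>0</KnownFailureCount>" +
                "  </TestLog>" +
                "</SPOT_Platform_Test>");

            // Same counting logic as GetTestResultDetails in Example #5.
            int totalTestMethods = 0;
            foreach (XmlNode tlNode in doc.SelectNodes("/SPOT_Platform_Test/TestLog"))
            {
                totalTestMethods += tlNode.SelectNodes("TestMethod").Count;
            }

            Console.WriteLine(totalTestMethods);   // prints 2
        }
    }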
Example #6
            private void UpdateTestDetails(Harness harness, BaseTest currentTest, 
                Harness.Result mfResult, int totalTestMethods, XmlDocument doc)
            {
                string[] separator = { "::" };
                try
                {
                    currentTest.EndTime = harness.EndTime;
                    currentTest.LogFile = harness.LogFile;
                    currentTest.Result = mfResult.ToString();
                    currentTest.StartTime = harness.StartTime;

                    if (m_isProfilerRun)
                    {
                        ArrayList methodList = Utils.ReadProfilerLogFile(currentTest.LogFile, currentTest.ExeLocation);

                        for (int i = 0; i < methodList.Count; i++)
                        {
                            string[] vals = methodList[i].ToString().Split('\t');

                            int exclTime = 0;
                            if (!string.IsNullOrEmpty(vals[3]))
                            {
                                exclTime = Convert.ToInt32(vals[3].Trim());
                            }

                            int inclTime = 0;
                            if (!string.IsNullOrEmpty(vals[4]))
                            {
                                inclTime = Convert.ToInt32(vals[4].Trim());
                            }

                            ProfilerTestMethod ptm = new ProfilerTestMethod();
                            ptm.TestMethod = vals[2].Split(separator, StringSplitOptions.None)[1].Trim();
                            ptm.InclusiveTime = inclTime;
                            ptm.ExclusiveTime = exclTime;

                            currentTest.TestMethods.Add(ptm);
                        }
                    }
                    else
                    {
                        // Update specific results count.
                        switch (mfResult)
                        {
                            case Harness.Result.Pass:
                                m_passCount++;
                                break;

                            case Harness.Result.Fail:
                                m_failCount++;
                                break;

                            case Harness.Result.Skip:
                                m_skipCount++;
                                break;

                            case Harness.Result.KnownFailure:
                                m_knownFailCount++;
                                break;
                        }

                        int pc = 0, fc = 0, sc = 0, kc = 0;
                        try
                        {
                            // Get total passcount, failcount, skipcount and known failure count for the test
                            XmlNodeList passNodes = doc.GetElementsByTagName("PassCount");
                            foreach (XmlNode passNode in passNodes)
                            {
                                pc += Convert.ToInt32(passNode.ChildNodes[0].InnerText);
                            }

                            XmlNodeList failNodes = doc.GetElementsByTagName("FailCount");
                            foreach (XmlNode failNode in failNodes)
                            {
                                fc += Convert.ToInt32(failNode.ChildNodes[0].InnerText);
                            }

                            XmlNodeList skipNodes = doc.GetElementsByTagName("SkipCount");
                            foreach (XmlNode skipNode in skipNodes)
                            {
                                sc += Convert.ToInt32(skipNode.ChildNodes[0].InnerText);
                            }

                            XmlNodeList knownFailNodes = doc.GetElementsByTagName("KnownFailureCount");
                            foreach (XmlNode knownFailNode in knownFailNodes)
                            {
                                kc += Convert.ToInt32(knownFailNode.ChildNodes[0].InnerText);
                            }
                        }
                        catch
                        {
                            // Ignore malformed count elements and keep whatever totals were accumulated.
                        }

                        currentTest.TestMethodFailCount = fc;
                        currentTest.TestMethodKnownFailureCount = kc;
                        currentTest.TestMethodPassCount = pc;
                        currentTest.TestMethodSkipCount = sc;
                        currentTest.TotalTestCases = totalTestMethods;
                    }

                    m_mfTestList.Add(currentTest);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("WARNING: " + ex.ToString());
                }
            }
Example #7
 /// <summary>
 /// Overloaded Constructor 
 /// </summary>
 /// <param name="test">The name of the sln file.</param>
 /// <param name="harness">The Harness instance.</param>
 /// <param name="log">Path to log file. </param>
 internal TimedTest(BaseTest test, Harness harness, XmlLog log)
 {
     this.m_test = test;
     this.m_harness = harness;
     this.m_log = log;
 }
Example #8
            private void TerminateRunningEmulators(TimedTest test, BaseTest bTest)
            {
                if (string.Equals(m_transport.ToLower(), "emulator"))
                {
                    try
                    {
                        test.KillEmulator();
                    }
                    catch (Exception ex)
                    {
                        Utils.WriteToEventLog("An exception was thrown when killing the emulator "
                            + "before executing " + bTest.Name + " : " + ex.ToString());
                    }
                }

                string[] onboardFlashes = new string[] {
                    Path.Combine(Path.GetDirectoryName(bTest.Location), "OnBoardFlash.dat"),
                    Path.Combine(Directory.GetCurrentDirectory(), "OnBoardFlash.dat") };

                foreach (string onboardFlash in onboardFlashes)
                {
                    if (File.Exists(onboardFlash))
                    {
                        try
                        {
                            File.Delete(onboardFlash);
                        }
                        catch
                        {
                            // Best-effort cleanup; ignore failures while deleting the flash file.
                        }
                    }
                }
            }
Example #9
        private string BuildFilePath(BaseTest test, ref string xslPath)
        {
            string toolSrcPath        = @"\test\Platform\";
            string srcPath            = @"\test\Platform\Tests\";
            string enlistmentPath     = GetEnlistmentPath;
            bool   isTestMsiInstalled = IsTestMsiInstalled;
            string file = string.Empty;

            xslPath = "Results.xsl";

            if (!String.IsNullOrEmpty(test.Name))
            {
                // If dev environment, use the test files from the enlistment.
                // Else If the test msi is installed, use the test files from the installed location.
                // Else, neither enlistment exists nor test msi is installed - throw a file not found exception.
                if (m_isDevEnvironment)
                {
                    if (!Path.IsPathRooted(test.Name))
                    {
                        test.Name = Path.Combine(Environment.GetEnvironmentVariable("SPOCLIENT"), test.Name);
                    }

                    if (File.Exists(test.Name))
                    {
                        file = Path.GetFullPath(test.Name);
                    }
                    else
                    {
                        if (test is ProfilerTest)
                        {
                            file = enlistmentPath + srcPath + @"\Performance\ProfilerTests\" + test.Name;
                        }
                        else
                        {
                            file = enlistmentPath + srcPath + test.Name;
                        }
                    }

                    if (!File.Exists(file))
                    {
                        throw new FileNotFoundException();
                    }

                    // Set the log folder path.
                    m_mfLogDirectory = enlistmentPath + toolSrcPath + @"Tools\MFTestSystem\Results";
                }
                else if (isTestMsiInstalled)
                {
                    m_mfLogDirectory = string.Format("{0}{1}\\{2}\\",
                                                     m_installPath, "Results", TestSystem.TestResultId);

                    if (test is ProfilerTest)
                    {
                        srcPath = string.Format("{0}{1}", m_installPath, @"ManagedProfilerTests\");
                    }
                    else
                    {
                        // If the file doesn't exist assume that the path is not an absolute path.
                        srcPath     = string.Format("{0}{1}", m_installPath, @"TestCases\");
                        toolSrcPath = string.Format("{0}{1}", m_installPath, @"Tools\");
                    }

                    if (!File.Exists(test.Location))
                    {
                        file = srcPath + test.Location;
                    }
                    else
                    {
                        file = test.Location;
                    }
                }
                else
                {
                    throw new System.Exception("Could not find the tests. Please install the test msi");
                }

                if (!File.Exists(file))
                {
                    Console.WriteLine("\tERROR: File not found: " + file);
                    Utils.WriteToEventLog("File Not Found: " + file);
                    throw new System.ArgumentException(
                              "Specified file " + file + " could not be found.");
                }
            }

            test.Location = file;
            return(file);
        }
Example #10
        internal HarnessExecutionResult Run(BaseTest test, XmlLog log)
        {
            m_initialTime = new DateTime();
            string file           = string.Empty;
            string csprojFilePath = string.Empty;

            m_logFileSaved = false;
            m_debugText    = new StringBuilder();
            Thread desktopThread             = null;
            NamedPipeServerStream pipeStream = null;

            try
            {
                // Set the file paths.
                file = BuildFilePath(test, ref m_xslPath);
                Console.WriteLine("\nTest: " + test.Name);
                string pathForFile = file.Substring(0, file.LastIndexOf(@"\"));
                if (!pathForFile.EndsWith(@"\"))
                {
                    pathForFile = string.Format(@"{0}\", pathForFile);
                }
                string slnFileName = test.Name.Substring(test.Name.LastIndexOf(@"\") + 1);

                try
                {
                    // Get reference list and build the test
                    ArrayList referenceList = GetProjectReferences(file, ref csprojFilePath);
                    if (m_currentAppType != TestType.DeviceDesktop)
                    {
                        BuildTest(csprojFilePath, m_currentAppType);
                    }
                    else
                    {
                        pipeStream = new
                                     NamedPipeServerStream("MFHarnessPipe", PipeDirection.InOut, 1,
                                                           PipeTransmissionMode.Message, PipeOptions.WriteThrough);
                    }

                    // Set log file name.
                    SetLogFileName(test.Name);
                    test.LogFile = m_outputXmlFileName;

                    if (!(test is ProfilerTest))
                    {
                        m_log = log;
                        m_log.StartLog(m_outputXmlFileName, m_xslPath);
                    }

                    // Get build and exe path.
                    string buildPath = GetBuildPath(file);
                    test.ExeLocation = buildPath + m_assemblyName + ".exe";

                    // If this is a profiler test, run the test and return.
                    if (test is ProfilerTest)
                    {
                        return(RunProfilerTest(file, test.ExeLocation, buildPath, referenceList));
                    }

                    string testName = m_logFileName.Replace(".xml", string.Empty);
                    StartCodeCoverage(testName);
                    m_startTime = DateTime.Now;

                    switch (m_currentAppType)
                    {
                    case TestType.Device:
                        try
                        {
                            RunDeviceTest(buildPath, test.ExeLocation, referenceList);
                        }
                        catch (Exception ex)
                        {
                            Close();
                            return(HarnessResult(ex));
                        }
                        break;

                    case TestType.DeviceDesktop:
                        // Desktop apps under
                        //              DevBox: %spoclient%\Test\Platform\Tests\Desktop\Applications\
                        //              TestBox: %programfiles%\Microsoft .NET Micro Framework\<version>\Tests\Desktop\
                        desktopThread = StartDesktopApplication(pathForFile, ref csprojFilePath);
                        pipeStream.WaitForConnection();

                        // Get references and build the device test.
                        GetProjectReferences(file, ref csprojFilePath);
                        BuildTest(csprojFilePath, TestType.Device);
                        RunDeviceTest(buildPath, test.ExeLocation, referenceList);
                        break;

                    case TestType.Desktop:
                        RunDesktopTest(test.ExeLocation);
                        break;
                    }

                    m_endTime = DateTime.Now;
                    StopCodeCoverage(testName);
                }
                catch (Exception ex)
                {
                    m_endTime = DateTime.Now;
                    if (null != m_log)
                    {
                        m_log.WriteElementString("Test_Exception", ex.Message + ex.StackTrace);
                    }
                    Utils.WriteToEventLog("Exception: " + ex.ToString());
                    Close();
                    if (ex is ApplicationException && ex.Message.ToLower().Contains("build failure"))
                    {
                        return(HarnessExecutionResult.Abort);
                    }
                    else
                    {
                        return(HarnessExecutionResult.Unavailable);
                    }
                }
                finally
                {
                    if (m_currentAppType == TestType.DeviceDesktop)
                    {
                        StopDesktopApplication(pipeStream, desktopThread);
                    }

                    Close();

                    if (null != m_log)
                    {
                        switch (m_currentAppType)
                        {
                        case TestType.DeviceDesktop:
                            SaveLogFile(test.Location, TestType.DeviceDesktop);
                            break;

                        default:
                            SaveLogFile(test.Location);
                            break;
                        }
                    }

                    // Change test results back to unknown for the next test.
                    TestResults = Result.NotKnown;
                }

                if (!string.IsNullOrEmpty(file))
                {
                    if (!string.Equals(m_transport.ToLower(), "emulator"))
                    {
                        m_log.SynchronizeLogTime(file, m_initialTime);
                    }
                }
                else
                {
                    return(HarnessExecutionResult.Unavailable);
                }
            }
            catch (Exception ex)
            {
                if (ex is FileNotFoundException)
                {
                    throw; // Rethrow, preserving the original stack trace.
                }

                Utils.WriteToEventLog(string.Format("Exception in Harness: {0}", ex.ToString()));
                return(HarnessExecutionResult.Abort);
            }

            return(HarnessExecutionResult.Success);
        }
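A hedged sketch of how Run might be driven from the test-system side, tying Examples #3, #5 and #10 together; the containing method and its error handling are illustrative assumptions, not the original driver:

        // Hypothetical driver: build the test list, run each test through the harness,
        // then harvest the results. Field and method names come from the examples above.
        private void RunAllTests(Harness harness, XmlLog log)
        {
            BaseTest[] tests = BuildTestList(false);                       // Example #3

            foreach (BaseTest test in tests)
            {
                HarnessExecutionResult result = harness.Run(test, log);    // Example #10

                if (result != HarnessExecutionResult.Success)
                {
                    Utils.WriteToEventLog("Harness returned " + result + " for " + test.Name);
                }

                GetTestResultDetails(harness, test);                       // Example #5
            }
        }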
Example #11
        private string BuildFilePath(BaseTest test, ref string xslPath)
        {
            string toolSrcPath = @"\test\Platform\";
            string srcPath = @"\test\Platform\Tests\";
            string enlistmentPath = GetEnlistmentPath;
            bool isTestMsiInstalled = IsTestMsiInstalled;
            string file = string.Empty;
            xslPath = "Results.xsl";

            if (!String.IsNullOrEmpty(test.Name))
            {
                // If dev environment, use the test files from the enlistment.
                // Else If the test msi is installed, use the test files from the installed location.
                // Else, neither enlistment exists nor test msi is installed - throw a file not found exception.
                if (m_isDevEnvironment)
                {
                    if (!Path.IsPathRooted(test.Name))
                    {
                        test.Name = Path.Combine(Environment.GetEnvironmentVariable("SPOCLIENT"), test.Name);
                    }

                    if (File.Exists(test.Name))
                    {
                        file = Path.GetFullPath(test.Name);
                    }
                    else
                    {
                        if (test is ProfilerTest)
                        {
                            file = enlistmentPath + srcPath + @"\Performance\ProfilerTests\" + test.Name;
                        }
                        else
                        {
                            file = enlistmentPath + srcPath + test.Name;
                        }
                    }

                    if (!File.Exists(file))
                    {
                        throw new FileNotFoundException();
                    }

                    // Set the log folder path.
                    m_mfLogDirectory = enlistmentPath + toolSrcPath + @"Tools\MFTestSystem\Results";
                }
                else if (isTestMsiInstalled)
                {
                    m_mfLogDirectory = string.Format("{0}{1}\\{2}\\",
                        m_installPath, "Results", TestSystem.TestResultId);
                    
                    if (test is ProfilerTest)
                    {
                        srcPath = string.Format("{0}{1}", m_installPath, @"ManagedProfilerTests\");
                    }
                    else
                    {
                        // If the file doesn't exist assume that the path is not an absolute path.                        
                        srcPath = string.Format("{0}{1}", m_installPath, @"TestCases\");
                        toolSrcPath = string.Format("{0}{1}", m_installPath, @"Tools\");                        
                    }

                    if (!File.Exists(test.Location))
                    {
                        file = srcPath + test.Location;
                    }
                    else
                    {
                        file = test.Location;
                    }
                }
                else
                {
                    throw new System.Exception("Could not find the tests. Please install the test msi");
                }

                if (!File.Exists(file))
                {
                    Console.WriteLine("\tERROR: File not found: " + file);
                    Utils.WriteToEventLog("File Not Found: " + file);
                    throw new System.ArgumentException(
                        "Specified file " + file + " could not be found.");
                }
            }

            test.Location = file;
            return file;
        }
Example #12
        internal HarnessExecutionResult Run(BaseTest test, XmlLog log)
        {
            m_initialTime = new DateTime();
            string file = string.Empty;
            string csprojFilePath = string.Empty;
            m_logFileSaved = false;
            m_debugText = new StringBuilder();
            Thread desktopThread = null;
            NamedPipeServerStream pipeStream = null;                     

            try
            {
                // Set the file paths.
                file = BuildFilePath(test, ref m_xslPath);
                Console.WriteLine("\nTest: " + test.Name);
                string pathForFile = file.Substring(0, file.LastIndexOf(@"\"));
                if (!pathForFile.EndsWith(@"\"))
                {
                    pathForFile = string.Format(@"{0}\", pathForFile);
                }
                string slnFileName = test.Name.Substring(test.Name.LastIndexOf(@"\") + 1);

                try
                {
                    // Get reference list and build the test
                    ArrayList referenceList = GetProjectReferences(file, ref csprojFilePath);
                    if (m_currentAppType != TestType.DeviceDesktop)
                    {
                        BuildTest(csprojFilePath, m_currentAppType);
                    }
                    else
                    {
                        pipeStream = new
                                    NamedPipeServerStream("MFHarnessPipe", PipeDirection.InOut, 1,
                                    PipeTransmissionMode.Message, PipeOptions.WriteThrough); 
                    }

                    // Set log file name.
                    SetLogFileName(test.Name);
                    test.LogFile = m_outputXmlFileName;

                    if (!(test is ProfilerTest))
                    {
                        m_log = log;
                        m_log.StartLog(m_outputXmlFileName, m_xslPath);
                    }

                    // Get build and exe path.
                    string buildPath = GetBuildPath(file);
                    test.ExeLocation = buildPath + m_assemblyName + ".exe";

                    // If this is a profiler test, run the test and return.
                    if (test is ProfilerTest)
                    {
                        return RunProfilerTest(file, test.ExeLocation, buildPath, referenceList);
                    }                    

                    string testName = m_logFileName.Replace(".xml", string.Empty);
                    StartCodeCoverage(testName);
                    m_startTime = DateTime.Now;

                    switch(m_currentAppType)
                    {
                        case TestType.Device:
                            try
                            {
                                RunDeviceTest(buildPath, test.ExeLocation, referenceList);
                            }
                            catch (Exception ex)
                            {
                                Close();
                                return HarnessResult(ex);
                            }
                            break;

                        case TestType.DeviceDesktop:
                            // Desktop apps under 
                            //              DevBox: %spoclient%\Test\Platform\Tests\Desktop\Applications\
                            //              TestBox: %programfiles%\Microsoft .NET Micro Framework\<version>\Tests\Desktop\
                            desktopThread = StartDesktopApplication(pathForFile, ref csprojFilePath);
                            pipeStream.WaitForConnection();                            
                            
                            // Get references and build the device test.
                            GetProjectReferences(file, ref csprojFilePath);
                            BuildTest(csprojFilePath, TestType.Device);                               
                            RunDeviceTest(buildPath, test.ExeLocation, referenceList);
                            break;

                        case TestType.Desktop:
                            RunDesktopTest(test.ExeLocation);
                            break;
                    }

                    m_endTime = DateTime.Now;
                    StopCodeCoverage(testName);
                }
                catch (Exception ex)
                {                    
                    m_endTime = DateTime.Now;
                    if (null != m_log)
                    {
                        m_log.WriteElementString("Test_Exception", ex.Message + ex.StackTrace);
                    }
                    Utils.WriteToEventLog("Exception: " + ex.ToString());
                    Close();
                    if (ex is ApplicationException && ex.Message.ToLower().Contains("build failure"))
                    {
                        return HarnessExecutionResult.Abort;
                    }
                    else
                    {
                        return HarnessExecutionResult.Unavailable;
                    }
                }
                finally
                {
                    if (m_currentAppType == TestType.DeviceDesktop)
                    {
                        StopDesktopApplication(pipeStream, desktopThread);
                    }

                    Close();

                    if (null != m_log)
                    {
                        switch(m_currentAppType)
                        {
                            case TestType.DeviceDesktop:
                                SaveLogFile(test.Location, TestType.DeviceDesktop);
                                break;

                            default:
                                SaveLogFile(test.Location);
                                break;                            
                        }                        
                    }

                    // Change test results back to unknown for the next test.
                    TestResults = Result.NotKnown;
                }

                if (!string.IsNullOrEmpty(file))
                {
                    if (!string.Equals(m_transport.ToLower(), "emulator"))
                    {
                        m_log.SynchronizeLogTime(file, m_initialTime);
                    }
                }
                else
                {
                    return HarnessExecutionResult.Unavailable;
                }                
            }
            catch (Exception ex)
            {
                if (ex is FileNotFoundException)
                {
                    throw; // Rethrow, preserving the original stack trace.
                }

                Utils.WriteToEventLog(string.Format("Exception in Harness: {0}", ex.ToString()));
                return HarnessExecutionResult.Abort;
            }

            return HarnessExecutionResult.Success;
        }
Example #13
 /// <summary>
 /// Overloaded Constructor
 /// </summary>
 /// <param name="test">The name of the sln file.</param>
 /// <param name="harness">The Harness instance.</param>
 /// <param name="log">Path to log file. </param>
 internal TimedTest(BaseTest test, Harness harness, XmlLog log)
 {
     this.m_test    = test;
     this.m_harness = harness;
     this.m_log     = log;
 }