/// <summary>
/// Parses the error list from visual studio and collect the results from TcUnit.
/// Runs a small sequential state machine (expectedErrorLogEntryType) over the low-level
/// build messages: every test suite must report, in order, its "finished running" line,
/// its statistics line, and then (per test case) name, class name, status/asserts and
/// - for failed tests - assert message and assert type.
/// </summary>
/// <returns>
/// null if parse failed or results are not ready
/// If parse succeeds, the test results are returned
/// </returns>
public TcUnitTestResult ParseResults(IEnumerable<ErrorList.Error> errors, string unitTestTaskName)
{
    TcUnitTestResult tcUnitTestResult = new TcUnitTestResult();

    if (!AreTestResultsAvailable())
    {
        // Results not ready yet (the final summary lines have not been seen) - caller should retry later
        return (null);
    }
    else
    {
        // The state machine starts by expecting the first test suite's "finished running" message
        ErrorLogEntryType expectedErrorLogEntryType = ErrorLogEntryType.TEST_SUITE_FINISHED_RUNNING;
        tcUnitTestResult.AddGeneralTestResults((uint)numberOfTestSuites, (uint)numberOfTests,
                                               (uint)numberOfSuccessfulTests, (uint)numberOfFailedTests);

        // Temporary variables
        uint currentTestIdentity = 0;               // ID of the test suite currently being parsed (0-based, incremented per suite)
        uint currentTestCaseInTestSuiteNumber = 0;  // 1-based counter of test cases within the current suite

        /* Storage variables */
        // Test suite
        string testSuiteName = "";
        uint testSuiteIdentity = 0;
        uint testSuiteNumberOfTests = 0;
        uint testSuiteNumberOfFailedTests = 0;
        // Test case
        string testSuiteTestCaseName = "";
        string testSuiteTestCaseClassName = "";
        string testSuiteTestCaseStatus = "";
        string testSuiteTestCaseFailureMessage = "";
        string testSuiteTestCaseAssertType = "";
        uint testSuiteTestCaseNumberOfAsserts = 0;
        List<TcUnitTestResult.TestCaseResult> testSuiteTestCaseResults = new List<TcUnitTestResult.TestCaseResult>();

        /* Find all test suite IDs. There must be one ID for every test suite. The ID starts at 0, so
         * if we have 5 test suites, we should expect the IDs to be 0, 1, 2, 3, 4 */
        foreach (var item in errors.Where(e => e.ErrorLevel == vsBuildErrorLevel.vsBuildErrorLevelLow))
        {
            string tcUnitAdsMessage;

            // Only do further processing if message is from TcUnit
            if (IsTcUnitAdsMessage(item.Description, unitTestTaskName))
            {
                tcUnitAdsMessage = RemoveEverythingButTcUnitAdsMessage(item.Description, unitTestTaskName);

                /* -------------------------------------
                 * Look for test suite finished running
                 * ------------------------------------- */
                if (tcUnitAdsMessage.Contains("Test suite ID=" + currentTestIdentity + " '"))
                {
                    if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_SUITE_FINISHED_RUNNING)
                    {
                        // Reset stored variables
                        testSuiteName = "";
                        testSuiteIdentity = 0;
                        testSuiteNumberOfTests = 0;
                        testSuiteNumberOfFailedTests = 0;
                        testSuiteTestCaseName = "";
                        testSuiteTestCaseClassName = "";
                        testSuiteTestCaseStatus = "";
                        testSuiteTestCaseFailureMessage = "";
                        testSuiteTestCaseAssertType = "";
                        testSuiteTestCaseResults.Clear();

                        // Parse test suite name.
                        // Offset 16 = length of "Test suite ID=" (14) plus " '" (2); the ID's own digit
                        // count is added separately via currentTestIdentity.ToString().Length.
                        testSuiteIdentity = currentTestIdentity;
                        testSuiteName = tcUnitAdsMessage.Substring(tcUnitAdsMessage.LastIndexOf("Test suite ID=" + currentTestIdentity + " '") + currentTestIdentity.ToString().Length + 16);

                        /* If last character is ', remove it */
                        if (String.Equals(testSuiteName[testSuiteName.Length - 1].ToString(), "'", StringComparison.InvariantCultureIgnoreCase))
                        {
                            testSuiteName = testSuiteName.Remove(testSuiteName.Length - 1);
                        }
                        expectedErrorLogEntryType = ErrorLogEntryType.TEST_SUITE_STATISTICS;
                    }
                    else
                    {
                        // Message arrived out of the expected order - abort the whole parse
                        log.Error("ERROR: While parsing TcUnit results, expected " + expectedErrorLogEntryType.ToString() + " but got " + ErrorLogEntryType.TEST_SUITE_FINISHED_RUNNING.ToString());
                        return (null);
                    }
                }
                /* -------------------------------------
                 * Look for test suite statistics
                 * ------------------------------------- */
                else if (tcUnitAdsMessage.Contains("ID=" + currentTestIdentity + " number of tests=") &&
                         tcUnitAdsMessage.Contains(", number of failed tests="))
                {
                    if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_SUITE_STATISTICS)
                    {
                        // Parse test suite results (number of tests + number of failed tests)
                        string numberOfTestsString = Utilities.GetStringBetween(tcUnitAdsMessage, "ID=" + currentTestIdentity + " number of tests=", ", number of failed tests=");
                        if (!uint.TryParse(numberOfTestsString, out testSuiteNumberOfTests))
                        {
                            // Handle error
                            // NOTE(review): parse failure is silently ignored and testSuiteNumberOfTests
                            // keeps its previous value - consider logging and returning null here
                        }
                        // Offset 25 = length of the literal ", number of failed tests="
                        string numberOfFailedTestsString = tcUnitAdsMessage.Substring(tcUnitAdsMessage.LastIndexOf(", number of failed tests=") + 25);
                        if (!uint.TryParse(numberOfFailedTestsString, out testSuiteNumberOfFailedTests))
                        {
                            // Handle error
                            // NOTE(review): same silent-ignore as above - verify whether this is intended
                        }

                        /* Now two things can happen. Either the testsuite didn't have any tests (testSuiteNumberOfTests=0)
                         * or it had tests. If it didn't have any tests, we store the testsuite result here and go to the
                         * next test suite. If it had tests, we continue */
                        if (testSuiteNumberOfTests.Equals(0))
                        {
                            // Store test suite & go to next test suite
                            TcUnitTestResult.TestSuiteResult tsResult = new TcUnitTestResult.TestSuiteResult(testSuiteName, testSuiteIdentity, testSuiteNumberOfTests, testSuiteNumberOfFailedTests);
                            tcUnitTestResult.AddNewTestSuiteResult(tsResult);
                            expectedErrorLogEntryType = ErrorLogEntryType.TEST_SUITE_FINISHED_RUNNING;
                        }
                        else
                        {
                            expectedErrorLogEntryType = ErrorLogEntryType.TEST_NAME;
                            currentTestCaseInTestSuiteNumber = 1;
                        }
                        currentTestIdentity++;
                    }
                    else
                    {
                        log.Error("ERROR: While parsing TcUnit results, expected " + expectedErrorLogEntryType.ToString() + " but got " + ErrorLogEntryType.TEST_SUITE_STATISTICS.ToString());
                        return (null);
                    }
                }
                /* -------------------------------------
                 * Look for test name
                 * ------------------------------------- */
                else if (tcUnitAdsMessage.Contains("Test name="))
                {
                    if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_NAME && currentTestCaseInTestSuiteNumber <= testSuiteNumberOfTests)
                    {
                        // Parse test name. Offset 10 = length of "Test name="
                        string testName = tcUnitAdsMessage.Substring(tcUnitAdsMessage.LastIndexOf("Test name=") + 10);
                        testSuiteTestCaseName = testName;
                        currentTestCaseInTestSuiteNumber++;
                        expectedErrorLogEntryType = ErrorLogEntryType.TEST_CLASS_NAME;
                    }
                    else
                    {
                        // Two distinct error causes, reported separately: wrong state vs. too many test cases
                        if (expectedErrorLogEntryType != ErrorLogEntryType.TEST_NAME)
                        {
                            log.Error("ERROR: While parsing TcUnit results, expected " + expectedErrorLogEntryType.ToString() + " but got " + ErrorLogEntryType.TEST_NAME.ToString());
                        }
                        else
                        {
                            log.Error("ERROR: While parsing TcUnit results, got test case number " + currentTestCaseInTestSuiteNumber + " but expected amount is " + testSuiteNumberOfTests);
                        }
                        return (null);
                    }
                }
                /* -------------------------------------
                 * Look for test class name
                 * ------------------------------------- */
                else if (tcUnitAdsMessage.Contains("Test class name="))
                {
                    if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_CLASS_NAME)
                    {
                        // Parse test class name. Offset 16 = length of "Test class name="
                        string testClassName = tcUnitAdsMessage.Substring(tcUnitAdsMessage.LastIndexOf("Test class name=") + 16);
                        testSuiteTestCaseClassName = testClassName;
                        expectedErrorLogEntryType = ErrorLogEntryType.TEST_STATUS_AND_NUMBER_OF_ASSERTS;
                    }
                    else
                    {
                        log.Error("ERROR: While parsing TcUnit results, expected " + expectedErrorLogEntryType.ToString() + " but got " + ErrorLogEntryType.TEST_CLASS_NAME.ToString());
                        return (null);
                    }
                }
                /* -------------------------------------
                 * Look for test status and number of asserts
                 * ------------------------------------- */
                else if (tcUnitAdsMessage.Contains("Test status=") && tcUnitAdsMessage.Contains(", number of asserts="))
                {
                    if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_STATUS_AND_NUMBER_OF_ASSERTS)
                    {
                        // Parse test status. Offset 20 = length of ", number of asserts="
                        string testStatus = Utilities.GetStringBetween(tcUnitAdsMessage, "Test status=", ", number of asserts=");
                        string testNumberOfAssertions = tcUnitAdsMessage.Substring(tcUnitAdsMessage.LastIndexOf(", number of asserts=") + 20);
                        testSuiteTestCaseStatus = testStatus;
                        if (!uint.TryParse(testNumberOfAssertions, out testSuiteTestCaseNumberOfAsserts))
                        {
                            // Handle error
                            // NOTE(review): silent-ignore of a parse failure - verify whether this is intended
                        }

                        /* Now two things can happen. Either the test result/status is FAIL, in which case we want to read the
                         * assertion information. If the test result/status is not FAIL, we either continue to the next test case
                         * or the next test suite */
                        if (testStatus.Equals("FAIL"))
                        {
                            expectedErrorLogEntryType = ErrorLogEntryType.TEST_ASSERT_MESSAGE;
                        }
                        else
                        {
                            // Store test case (non-failing test has no failure message / assert type)
                            TcUnitTestResult.TestCaseResult tcResult = new TcUnitTestResult.TestCaseResult(testSuiteTestCaseName, testSuiteTestCaseClassName, testSuiteTestCaseStatus, "", "", testSuiteTestCaseNumberOfAsserts);
                            // Add test case result to test cases results
                            testSuiteTestCaseResults.Add(tcResult);

                            if (currentTestCaseInTestSuiteNumber <= testSuiteNumberOfTests)
                            {
                                // More tests in this test suite
                                expectedErrorLogEntryType = ErrorLogEntryType.TEST_NAME; // Goto next test case
                            }
                            else
                            {
                                // Last test case in this test suite
                                // Create test suite result
                                TcUnitTestResult.TestSuiteResult tsResult = new TcUnitTestResult.TestSuiteResult(testSuiteName, testSuiteIdentity, testSuiteNumberOfTests, testSuiteNumberOfFailedTests);
                                // Add test case results to test suite
                                foreach (TcUnitTestResult.TestCaseResult tcResultToBeStored in testSuiteTestCaseResults)
                                {
                                    tsResult.TestCaseResults.Add(tcResultToBeStored);
                                }
                                // Add test suite to final test results
                                tcUnitTestResult.AddNewTestSuiteResult(tsResult);
                                expectedErrorLogEntryType = ErrorLogEntryType.TEST_SUITE_FINISHED_RUNNING;
                            }
                        }
                    }
                    else
                    {
                        log.Error("ERROR: While parsing TcUnit results, expected " + expectedErrorLogEntryType.ToString() + " but got " + ErrorLogEntryType.TEST_STATUS_AND_NUMBER_OF_ASSERTS.ToString());
                        return (null);
                    }
                }
                /* -------------------------------------
                 * Look for test assert message
                 * ------------------------------------- */
                else if (tcUnitAdsMessage.Contains("Test assert message="))
                {
                    if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_ASSERT_MESSAGE)
                    {
                        // Parse test assert message. Offset 20 = length of "Test assert message="
                        string testAssertMessage = tcUnitAdsMessage.Substring(tcUnitAdsMessage.LastIndexOf("Test assert message=") + 20);
                        testSuiteTestCaseFailureMessage = testAssertMessage;
                        expectedErrorLogEntryType = ErrorLogEntryType.TEST_ASSERT_TYPE;
                    }
                    else
                    {
                        log.Error("ERROR: While parsing TcUnit results, expected " + expectedErrorLogEntryType.ToString() + " but got " + ErrorLogEntryType.TEST_ASSERT_MESSAGE.ToString());
                        return (null);
                    }
                }
                /* -------------------------------------
                 * Look for test assert type
                 * ------------------------------------- */
                else if (tcUnitAdsMessage.Contains("Test assert type="))
                {
                    if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_ASSERT_TYPE
                        /* Even though we might expect a test assertion message, the test assertion might not have included one message, and thus we will
                         * skip/not receive any TEST_ASSERTION_MESSAGE but rather instead get a TEST_ASSERT_TYPE */
                        || expectedErrorLogEntryType == ErrorLogEntryType.TEST_ASSERT_MESSAGE)
                    {
                        // Make sure to reset the assertion message to empty string if we have not received any test assert message
                        if (expectedErrorLogEntryType == ErrorLogEntryType.TEST_ASSERT_MESSAGE)
                        {
                            testSuiteTestCaseFailureMessage = "";
                        }

                        // Parse test assert type. Offset 17 = length of "Test assert type="
                        string testAssertType = tcUnitAdsMessage.Substring(tcUnitAdsMessage.LastIndexOf("Test assert type=") + 17);
                        testSuiteTestCaseAssertType = testAssertType;

                        // Store test case (this is the final message for a FAILed test case)
                        TcUnitTestResult.TestCaseResult tcResult = new TcUnitTestResult.TestCaseResult(testSuiteTestCaseName, testSuiteTestCaseClassName, testSuiteTestCaseStatus, testSuiteTestCaseFailureMessage, testSuiteTestCaseAssertType, testSuiteTestCaseNumberOfAsserts);
                        // Add test case result to test cases results
                        testSuiteTestCaseResults.Add(tcResult);

                        if (currentTestCaseInTestSuiteNumber <= testSuiteNumberOfTests)
                        {
                            // More tests in this test suite
                            expectedErrorLogEntryType = ErrorLogEntryType.TEST_NAME; // Goto next test case
                        }
                        else
                        {
                            // Last test case in this test suite
                            // Create test suite result
                            TcUnitTestResult.TestSuiteResult tsResult = new TcUnitTestResult.TestSuiteResult(testSuiteName, testSuiteIdentity, testSuiteNumberOfTests, testSuiteNumberOfFailedTests);
                            // Add test case results to test suite
                            foreach (TcUnitTestResult.TestCaseResult tcResultToBeStored in testSuiteTestCaseResults)
                            {
                                tsResult.TestCaseResults.Add(tcResultToBeStored);
                            }
                            // Add test suite to final test results
                            tcUnitTestResult.AddNewTestSuiteResult(tsResult);
                            expectedErrorLogEntryType = ErrorLogEntryType.TEST_SUITE_FINISHED_RUNNING;
                        }
                    }
                    else
                    {
                        log.Error("ERROR: While parsing TcUnit results, expected " + expectedErrorLogEntryType.ToString() + " but got " + ErrorLogEntryType.TEST_ASSERT_TYPE.ToString());
                        return (null);
                    }
                }
            }
        }
        log.Info("Done collecting TC results");
        return (tcUnitTestResult);
    }
}
/// <summary>
/// Entry point of TcUnit-Runner. Parses command-line options, configures logging,
/// loads the Visual Studio solution and TwinCAT project, enables the TcUnit task,
/// builds and activates the configuration, waits for TcUnit to finish running on
/// the target, and finally writes the results to an xUnit XML report.
/// Exits the process with one of the Constants.RETURN_* codes.
/// </summary>
/// <param name="args">Command-line arguments, see DisplayHelp for the available options</param>
static void Main(string[] args)
{
    bool showHelp = false;
    bool enableDebugLoggingLevel = false;
    Console.CancelKeyPress += new ConsoleCancelEventHandler(CancelKeyPressHandler);
    log4net.GlobalContext.Properties["LogLocation"] = AppDomain.CurrentDomain.BaseDirectory + "\\logs";
    log4net.Config.XmlConfigurator.ConfigureAndWatch(new System.IO.FileInfo(AppDomain.CurrentDomain.BaseDirectory + "log4net.config"));

    // Command-line option definitions; parsed values land in the static fields
    OptionSet options = new OptionSet()
        .Add("v=|VisualStudioSolutionFilePath=", "The full path to the TwinCAT project (sln-file)", v => VisualStudioSolutionFilePath = v)
        .Add("t=|TcUnitTaskName=", "[OPTIONAL] The name of the task running TcUnit defined under \"Tasks\"", t => TcUnitTaskName = t)
        .Add("a=|AmsNetId=", "[OPTIONAL] The AMS NetId of the device of where the project and TcUnit should run", a => AmsNetId = a)
        .Add("w=|TcVersion=", "[OPTIONAL] The TwinCAT version to be used to load the TwinCAT project", w => ForceToThisTwinCATVersion = w)
        .Add("u=|Timeout=", "[OPTIONAL] Timeout the process with an error after X minutes", u => Timeout = u)
        .Add("d|debug", "[OPTIONAL] Increase debug message verbosity", d => enableDebugLoggingLevel = d != null)
        .Add("?|h|help", h => showHelp = h != null);
    try
    {
        options.Parse(args);
    }
    catch (OptionException e)
    {
        Console.WriteLine(e.Message);
        Console.WriteLine("Try `TcUnit-Runner --help' for more information.");
        Environment.Exit(Constants.RETURN_ARGUMENT_ERROR);
    }

    if (showHelp)
    {
        DisplayHelp(options);
        Environment.Exit(Constants.RETURN_SUCCESSFULL);
    }

    /* Set logging level.
     * This is handled by changing the log4net.config file on the fly.
     * The following levels are defined in order of increasing priority:
     * - ALL
     * - DEBUG
     * - INFO
     * - WARN
     * - ERROR
     * - FATAL
     * - OFF */
    XmlDocument doc = new XmlDocument();
    doc.Load(AppDomain.CurrentDomain.BaseDirectory + "log4net.config");
    XmlNode root = doc.DocumentElement;
    XmlNode subNode1 = root.SelectSingleNode("root");
    XmlNode nodeForModify = subNode1.SelectSingleNode("level");
    if (enableDebugLoggingLevel)
    {
        nodeForModify.Attributes[0].Value = "DEBUG";
    }
    else
    {
        nodeForModify.Attributes[0].Value = "INFO";
    }
    doc.Save(AppDomain.CurrentDomain.BaseDirectory + "log4net.config");
    System.Threading.Thread.Sleep(500); // A tiny sleep just to make sure that log4net manages to detect the change in the file

    /* Make sure the user has supplied the path for the Visual Studio solution file.
     * Also verify that this file exists. */
    if (VisualStudioSolutionFilePath == null)
    {
        log.Error("Visual studio solution path not provided!");
        Environment.Exit(Constants.RETURN_VISUAL_STUDIO_SOLUTION_PATH_NOT_PROVIDED);
    }
    if (!File.Exists(VisualStudioSolutionFilePath))
    {
        log.Error("Visual studio solution " + VisualStudioSolutionFilePath + " does not exist!");
        Environment.Exit(Constants.RETURN_VISUAL_STUDIO_SOLUTION_PATH_NOT_FOUND);
    }

    LogBasicInfo();

    /* Start a timeout for the process(es) if the user asked for it */
    if (Timeout != null)
    {
        // FIX: validate the timeout argument instead of letting Int32.Parse throw an
        // unhandled FormatException/OverflowException on bad "-u" input
        int timeoutMinutes;
        if (!Int32.TryParse(Timeout, out timeoutMinutes) || timeoutMinutes <= 0)
        {
            log.Error("Invalid timeout value '" + Timeout + "', expected a positive number of minutes");
            Environment.Exit(Constants.RETURN_ARGUMENT_ERROR);
        }
        log.Info("Timeout enabled - process(es) timesout after " + Timeout + " minute(s)");
        System.Timers.Timer timeout = new System.Timers.Timer(timeoutMinutes * 1000 * 60);
        timeout.Elapsed += KillProcess;
        timeout.AutoReset = false;
        timeout.Start();
    }

    MessageFilter.Register();

    TwinCATProjectFilePath = TcFileUtilities.FindTwinCATProjectFile(VisualStudioSolutionFilePath);
    if (String.IsNullOrEmpty(TwinCATProjectFilePath))
    {
        log.Error("Did not find TwinCAT project file in solution. Is this a TwinCAT project?");
        Environment.Exit(Constants.RETURN_TWINCAT_PROJECT_FILE_NOT_FOUND);
    }
    if (!File.Exists(TwinCATProjectFilePath))
    {
        log.Error("TwinCAT project file " + TwinCATProjectFilePath + " does not exist!");
        Environment.Exit(Constants.RETURN_TWINCAT_PROJECT_FILE_NOT_FOUND);
    }

    string tcVersion = TcFileUtilities.GetTcVersion(TwinCATProjectFilePath);
    if (String.IsNullOrEmpty(tcVersion))
    {
        log.Error("Did not find TwinCAT version in TwinCAT project file path");
        Environment.Exit(Constants.RETURN_TWINCAT_VERSION_NOT_FOUND);
    }

    /* Load the DTE (Visual Studio automation interface) and the solution */
    try
    {
        vsInstance = new VisualStudioInstance(@VisualStudioSolutionFilePath, tcVersion, ForceToThisTwinCATVersion);
        bool isTcVersionPinned = XmlUtilities.IsTwinCATProjectPinned(TwinCATProjectFilePath);
        log.Info("Version is pinned: " + isTcVersionPinned);
        vsInstance.Load(isTcVersionPinned);
    }
    catch
    {
        log.Error("Error loading VS DTE. Is the correct version of Visual Studio and TwinCAT installed? Is the TcUnit-Runner running with administrator privileges?");
        CleanUpAndExitApplication(Constants.RETURN_ERROR_LOADING_VISUAL_STUDIO_DTE);
    }

    try
    {
        vsInstance.LoadSolution();
    }
    catch
    {
        log.Error("Error loading the solution. Try to open it manually and make sure it's possible to open and that all dependencies are working");
        CleanUpAndExitApplication(Constants.RETURN_ERROR_LOADING_VISUAL_STUDIO_SOLUTION);
    }

    if (vsInstance.GetVisualStudioVersion() == null)
    {
        log.Error("Did not find Visual Studio version in Visual Studio solution file");
        CleanUpAndExitApplication(Constants.RETURN_ERROR_FINDING_VISUAL_STUDIO_SOLUTION_VERSION);
    }

    AutomationInterface automationInterface = new AutomationInterface(vsInstance.GetProject());
    if (automationInterface.PlcTreeItem.ChildCount <= 0)
    {
        log.Error("No PLC-project exists in TwinCAT project");
        CleanUpAndExitApplication(Constants.RETURN_NO_PLC_PROJECT_IN_TWINCAT_PROJECT);
    }

    ITcSmTreeItem realTimeTasksTreeItem = automationInterface.RealTimeTasksTreeItem;

    /* Task name provided */
    if (!String.IsNullOrEmpty(TcUnitTaskName))
    {
        log.Info("Setting task '" + TcUnitTaskName + "' enable and autostart, and all other tasks (if existing) to disable and non-autostart");
        bool foundTcUnitTaskName = false;

        /* Find all tasks, and check whether the user provided TcUnit task is amongst them.
         * Also update the task object (Update <Disabled> and <Autostart>-tag) */
        foreach (ITcSmTreeItem child in realTimeTasksTreeItem)
        {
            ITcSmTreeItem testTreeItem = realTimeTasksTreeItem.LookupChild(child.Name);
            string xmlString = testTreeItem.ProduceXml();
            string newXmlString = "";
            try
            {
                if (TcUnitTaskName.Equals(XmlUtilities.GetItemNameFromRealTimeTaskXML(xmlString)))
                {
                    foundTcUnitTaskName = true;
                    newXmlString = XmlUtilities.SetDisabledAndAndAutoStartOfRealTimeTaskXml(xmlString, false, true);
                }
                else
                {
                    newXmlString = XmlUtilities.SetDisabledAndAndAutoStartOfRealTimeTaskXml(xmlString, true, false);
                }
                testTreeItem.ConsumeXml(newXmlString);
                System.Threading.Thread.Sleep(3000);
            }
            catch
            {
                log.Error("Could not parse real time task XML data");
                CleanUpAndExitApplication(Constants.RETURN_NOT_POSSIBLE_TO_PARSE_REAL_TIME_TASK_XML_DATA);
            }
        }

        if (!foundTcUnitTaskName)
        {
            log.Error("Could not find task '" + TcUnitTaskName + "' in TwinCAT project");
            CleanUpAndExitApplication(Constants.RETURN_FAILED_FINDING_DEFINED_UNIT_TEST_TASK_IN_TWINCAT_PROJECT);
        }
    }
    /* No task name provided */
    else
    {
        log.Info("No task name provided. Assuming only one task exists");
        /* Check that only one task exists */
        if (realTimeTasksTreeItem.ChildCount.Equals(1))
        {
            // Get task name
            ITcSmTreeItem child = realTimeTasksTreeItem.get_Child(1);
            ITcSmTreeItem testTreeItem = realTimeTasksTreeItem.LookupChild(child.Name);
            string xmlString = testTreeItem.ProduceXml();
            TcUnitTaskName = XmlUtilities.GetItemNameFromRealTimeTaskXML(xmlString);
            log.Info("Found task with name '" + TcUnitTaskName + "'");
            string newXmlString = "";
            newXmlString = XmlUtilities.SetDisabledAndAndAutoStartOfRealTimeTaskXml(xmlString, false, true);
            testTreeItem.ConsumeXml(newXmlString);
            System.Threading.Thread.Sleep(3000);
        }
        /* Less ore more than one task, which is an error */
        else
        {
            log.Error("The number of tasks is not equal to 1 (one). Found " + realTimeTasksTreeItem.ChildCount.ToString() + " number of tasks. Please provide which task is the TcUnit task");
            CleanUpAndExitApplication(Constants.RETURN_TASK_COUNT_NOT_EQUAL_TO_ONE);
        }
    }

    /* Build the solution and collect any eventual errors. Make sure to
     * filter out everything that is an error */
    vsInstance.CleanSolution();
    vsInstance.BuildSolution();

    ErrorItems errorsBuild = vsInstance.GetErrorItems();
    int tcBuildWarnings = 0;
    int tcBuildError = 0;
    for (int i = 1; i <= errorsBuild.Count; i++)
    {
        ErrorItem item = errorsBuild.Item(i);
        if ((item.ErrorLevel != vsBuildErrorLevel.vsBuildErrorLevelLow))
        {
            if (item.ErrorLevel == vsBuildErrorLevel.vsBuildErrorLevelMedium)
            {
                tcBuildWarnings++;
            }
            else if (item.ErrorLevel == vsBuildErrorLevel.vsBuildErrorLevelHigh)
            {
                tcBuildError++;
                log.Error("Description: " + item.Description);
                log.Error("ErrorLevel: " + item.ErrorLevel);
                log.Error("Filename: " + item.FileName);
            }
        }
    }

    /* If we don't have any errors, activate the configuration and
     * start/restart TwinCAT */
    if (tcBuildError.Equals(0))
    {
        /* Check whether the user has provided an AMS NetId. If so, use it. Otherwise use
         * the local AMS NetId */
        if (String.IsNullOrEmpty(AmsNetId))
        {
            AmsNetId = Constants.LOCAL_AMS_NET_ID;
        }
        log.Info("Setting target NetId to '" + AmsNetId + "'");
        automationInterface.ITcSysManager.SetTargetNetId(AmsNetId);
        log.Info("Enabling boot project and setting BootProjectAutostart on " + automationInterface.ITcSysManager.GetTargetNetId());

        for (int i = 1; i <= automationInterface.PlcTreeItem.ChildCount; i++)
        {
            ITcSmTreeItem plcProject = automationInterface.PlcTreeItem.Child[i];
            ITcPlcProject iecProject = (ITcPlcProject)plcProject;
            iecProject.BootProjectAutostart = true;

            /* add the port that is used for this PLC to the AmsPorts list that
             * is later used to monitory the AdsState */
            string xmlString = plcProject.ProduceXml();
            AmsPorts.Add(XmlUtilities.AmsPort(xmlString));
        }
        System.Threading.Thread.Sleep(1000);
        automationInterface.ActivateConfiguration();

        // Wait
        System.Threading.Thread.Sleep(10000);

        /* Clean the solution. This is the only way to clean the error list which needs to be
         * clean prior to starting the TwinCAT runtime */
        vsInstance.CleanSolution();

        // Wait
        System.Threading.Thread.Sleep(10000);

        automationInterface.StartRestartTwinCAT();
    }
    else
    {
        log.Error("Build errors in project");
        CleanUpAndExitApplication(Constants.RETURN_BUILD_ERROR);
    }

    /* Establish a connection to the ADS router */
    TcAdsClient tcAdsClient = new TcAdsClient();

    /* Run TcUnit until the results have been returned */
    TcUnitResultCollector tcUnitResultCollector = new TcUnitResultCollector();
    ErrorList errorList = new ErrorList();

    log.Info("Waiting for results from TcUnit...");

    ErrorItems errorItems;
    while (true)
    {
        System.Threading.Thread.Sleep(10000);

        /* Monitor the AdsState for each PLC that is used in the
         * solution. If we can't connect to the Ads Router, we just
         * carry on. */
        try
        {
            foreach (int amsPort in AmsPorts)
            {
                tcAdsClient.Connect(AmsNetId, amsPort);
                AdsState adsState = tcAdsClient.ReadState().AdsState;
                if (adsState != AdsState.Run)
                {
                    log.Error("Invalid AdsState " + adsState + "<>" + AdsState.Run + ". This could indicate a PLC Exception, terminating ...");
                    Environment.Exit(Constants.RETURN_INVALID_ADSSTATE);
                }
            }
        }
        catch (Exception ex)
        {
            // FIX: best-effort semantics preserved (we carry on if the ADS router is
            // unreachable), but the reason is now logged instead of silently discarded
            // - the 'ex' variable was previously declared and never used
            log.Debug("Could not read ADS state, carrying on: " + ex.Message);
        }
        finally
        {
            tcAdsClient.Disconnect();
        }

        errorItems = vsInstance.GetErrorItems();
        var newErrors = errorList.AddNew(errorItems);
        if (log.IsDebugEnabled)
        {
            foreach (var error in newErrors)
            {
                log.Debug(error.ErrorLevel + ": " + error.Description);
            }
        }
        log.Info("... got " + errorItems.Count + " report lines so far.");
        if (tcUnitResultCollector.AreResultsAvailable(errorItems))
        {
            log.Info("All results from TcUnit obtained");
            /* The last test suite result can be returned after that we have received the test results, wait a few seconds
             * and fetch again */
            System.Threading.Thread.Sleep(10000);
            break;
        }
    }

    List<ErrorList.Error> errors = new List<ErrorList.Error>(errorList.Where(e => (e.ErrorLevel == vsBuildErrorLevel.vsBuildErrorLevelHigh || e.ErrorLevel == vsBuildErrorLevel.vsBuildErrorLevelLow)));
    List<ErrorList.Error> errorsSorted = errors.OrderBy(o => o.Description).ToList();

    /* Parse all events (from the error list) from Visual Studio and store the results */
    TcUnitTestResult testResult = tcUnitResultCollector.ParseResults(errorsSorted, TcUnitTaskName);

    /* Write xUnit XML report */
    if (testResult != null)
    {
        // No need to check if file (VisualStudioSolutionFilePath) exists, as this has already been done
        string VisualStudioSolutionDirectoryPath = Path.GetDirectoryName(VisualStudioSolutionFilePath);
        string XUnitReportFilePath = VisualStudioSolutionDirectoryPath + "\\" + Constants.XUNIT_RESULT_FILE_NAME;
        log.Info("Writing xUnit XML file to " + XUnitReportFilePath);
        // Overwrites all existing content (if existing)
        XunitXmlCreator.WriteXml(testResult, XUnitReportFilePath);
    }

    CleanUpAndExitApplication(Constants.RETURN_SUCCESSFULL);
}
/// <summary>
/// Serializes the collected TcUnit test results to an xUnit-style XML report
/// (testsuites / testsuite / testcase hierarchy) and writes it to the given path,
/// overwriting any existing file.
/// </summary>
/// <param name="testResults">The collected TcUnit test results to serialize</param>
/// <param name="filePath">Full path of the XML file to write</param>
public static void WriteXml(TcUnitTestResult testResults, string filePath)
{
    XmlDocument xmlDoc = new XmlDocument();

    // <testsuites> root element with overall failure/test counts
    XmlElement testSuitesNode = xmlDoc.CreateElement("testsuites");
    xmlDoc.AppendChild(testSuitesNode);
    AddAttribute(xmlDoc, testSuitesNode, "failures", testResults.GetNumberOfFailedTestCases().ToString());
    AddAttribute(xmlDoc, testSuitesNode, "tests", testResults.GetNumberOfTestCases().ToString());

    foreach (TcUnitTestResult.TestSuiteResult tsResult in testResults)
    {
        // <testsuite> element, one per TcUnit test suite
        XmlElement testSuiteNode = xmlDoc.CreateElement("testsuite");
        AddAttribute(xmlDoc, testSuiteNode, "id", tsResult.Identity.ToString());
        AddAttribute(xmlDoc, testSuiteNode, "name", tsResult.Name);
        AddAttribute(xmlDoc, testSuiteNode, "tests", tsResult.NumberOfTests.ToString());
        AddAttribute(xmlDoc, testSuiteNode, "failures", tsResult.NumberOfFailedTests.ToString());

        // <testcase> elements, one per test case in the suite
        foreach (TcUnitTestResult.TestCaseResult tcResult in tsResult.TestCaseResults)
        {
            XmlElement testCaseNode = xmlDoc.CreateElement("testcase");
            AddAttribute(xmlDoc, testCaseNode, "name", tcResult.TestName);
            AddAttribute(xmlDoc, testCaseNode, "assertions", tcResult.NumberOfAsserts.ToString());
            AddAttribute(xmlDoc, testCaseNode, "classname", tcResult.TestClassName);
            AddAttribute(xmlDoc, testCaseNode, "status", tcResult.TestStatus);

            if (tcResult.TestStatus.Equals("SKIP"))
            {
                // <skipped> child marks a skipped test in the xUnit schema
                XmlElement testCaseSkippedNode = xmlDoc.CreateElement("skipped");
                testCaseNode.AppendChild(testCaseSkippedNode);
            }
            else if (tcResult.TestStatus.Equals("FAIL"))
            {
                // <failure> child carries the assertion message and assert type
                XmlElement failureNode = xmlDoc.CreateElement("failure");
                AddAttribute(xmlDoc, failureNode, "message", tcResult.FailureMessage);
                AddAttribute(xmlDoc, failureNode, "type", tcResult.AssertType);
                testCaseNode.AppendChild(failureNode);
            }

            // Append <testcase> to <testsuite>
            testSuiteNode.AppendChild(testCaseNode);
        }

        // Append <testsuite> to <testsuites>
        testSuitesNode.AppendChild(testSuiteNode);
    }

    BeautifyAndWriteToFile(xmlDoc, filePath);
}

/// <summary>
/// Creates an attribute with the given name and value and appends it to the node.
/// Factored out of WriteXml to remove the repeated create/set/append boilerplate;
/// attributes are appended in call order, preserving the original output layout.
/// </summary>
private static void AddAttribute(XmlDocument xmlDoc, XmlElement node, string name, string value)
{
    XmlAttribute attribute = xmlDoc.CreateAttribute(name);
    attribute.Value = value;
    node.Attributes.Append(attribute);
}