/// <summary>
/// This is the main method that is used to start automated test execution
/// </summary>
/// <param name="automatedTestRun">The automated test run object</param>
/// <param name="projectId">The id of the project</param>
/// <returns>Either the populated test run or an exception</returns>
public AutomatedTestRun4 StartExecution(AutomatedTestRun4 automatedTestRun, int projectId)
{
    //Set status to OK
    base.status = EngineStatus.OK;
    try
    {
        if (Properties.Settings.Default.TraceLogging)
        {
            LogEvent("Starting test execution", EventLogEntryType.Information);
        }
        DateTime startDate = DateTime.Now;

        /*
         * TODO: Instantiate the code/API used to access the external testing system
         */

        //See if we have any parameters we need to pass to the automation engine
        //(keys are stored lower-case so lookups are case-insensitive)
        Dictionary<string, string> parameters = new Dictionary<string, string>();
        if (automatedTestRun.Parameters == null)
        {
            if (Properties.Settings.Default.TraceLogging)
            {
                LogEvent("Test Run has no parameters", EventLogEntryType.Information);
            }
        }
        else
        {
            if (Properties.Settings.Default.TraceLogging)
            {
                LogEvent("Test Run has parameters", EventLogEntryType.Information);
            }
            foreach (TestRunParameter testRunParameter in automatedTestRun.Parameters)
            {
                //Make sure the parameters are lower case
                string parameterName = testRunParameter.Name.ToLowerInvariant();
                if (!parameters.ContainsKey(parameterName))
                {
                    if (Properties.Settings.Default.TraceLogging)
                    {
                        LogEvent("Adding test run parameter " + parameterName + " = " + testRunParameter.Value, EventLogEntryType.Information);
                    }
                    parameters.Add(parameterName, testRunParameter.Value);
                }
            }
        }

        //See if we have an attached or linked test script
        if (automatedTestRun.Type == AutomatedTestRun4.AttachmentType.URL)
        {
            //The "URL" of the test is actually the full file path of the file that contains the test script
            //Some automation engines need additional parameters which can be provided by allowing the test script filename
            //to consist of multiple elements separated by a specific character.
            //Conventionally, most engines use the pipe (|) character to delimit the different elements

            //To make it easier, we have certain shortcuts that can be used in the path
            //This allows the same test to be run on different machines with different physical folder layouts
            string path = automatedTestRun.FilenameOrUrl;
            path = path.Replace("[MyDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments));
            path = path.Replace("[CommonDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.CommonDocuments));
            path = path.Replace("[DesktopDirectory]", Environment.GetFolderPath(System.Environment.SpecialFolder.DesktopDirectory));
            path = path.Replace("[ProgramFiles]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles));
            path = path.Replace("[ProgramFilesX86]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86));

            //First make sure that the file exists
            if (File.Exists(path))
            {
                if (Properties.Settings.Default.TraceLogging)
                {
                    LogEvent("Executing " + Constants.EXTERNAL_SYSTEM_NAME + " test located at " + path, EventLogEntryType.Information);
                }

                /*
                 * TODO: Add the external-tool specific code to actually run the test using these values:
                 *   -path       - The path of the test case to execute
                 *   -parameters - A dictionary of parameters to use (if the engine supports parameters)
                 */
            }
            else
            {
                throw new FileNotFoundException("Unable to find a " + Constants.EXTERNAL_SYSTEM_NAME + " test at " + path);
            }
        }
        else
        {
            //We have an embedded script which we need to send to the test execution engine
            //If the automation engine doesn't support embedded/attached scripts, throw the following exception:
            /*
             * throw new InvalidOperationException("The " + Constants.EXTERNAL_SYSTEM_NAME + " automation engine only supports linked test scripts");
             */

            //First we need to get the test script
            if (automatedTestRun.TestScript == null || automatedTestRun.TestScript.Length == 0)
            {
                throw new ApplicationException("The provided " + Constants.EXTERNAL_SYSTEM_NAME + " test script is empty, aborting test execution");
            }
            string testScript = Encoding.UTF8.GetString(automatedTestRun.TestScript);

            /*
             * TODO: Add the external-tool specific code to actually run the test script using these values:
             *   -testScript - The text of the actual test script to execute
             *   -parameters - A dictionary of parameters to use (if the engine supports parameters)
             */
        }

        //Capture the time that it took to run the test
        DateTime endDate = DateTime.Now;

        //Now extract the test results
        /*
         * TODO: Need to write the code to actually extract the results from the external testing tool
         * and transform them into the format expected by SpiraTest and RemoteLaunch.
         *   - externalTestStatus
         *   - externalTestSummary
         *   - externalTestDetailedResults
         */
        string externalTestStatus = "Passed";   //TODO: Replace with real values
        string externalTestSummary = "5 passed, 4 errors, 3 warnings, 2 informational";   //TODO: Replace with real values
        string externalTestDetailedResults = "";   //TODO: Replace with real values

        //Populate the Test Run object with the results
        if (String.IsNullOrEmpty(automatedTestRun.RunnerName))
        {
            automatedTestRun.RunnerName = this.ExtensionName;
        }
        automatedTestRun.RunnerTestName = Path.GetFileNameWithoutExtension(automatedTestRun.FilenameOrUrl);

        //Convert the status for use in SpiraTest
        //The comparison is case-insensitive so that, e.g., "Passed" matches the "PASSED" label
        //(previously a case-sensitive mismatch meant the status always stayed NotRun)
        /*
         * TODO: Change the CASE statement to match the statuses that the external tool uses
         */
        AutomatedTestRun4.TestStatusEnum executionStatus = AutomatedTestRun4.TestStatusEnum.NotRun;
        switch (externalTestStatus.ToUpperInvariant())
        {
            case "PASSED":
                executionStatus = AutomatedTestRun4.TestStatusEnum.Passed;
                break;
            case "BLOCKED":
                executionStatus = AutomatedTestRun4.TestStatusEnum.Blocked;
                break;
            case "FAILED":
                executionStatus = AutomatedTestRun4.TestStatusEnum.Failed;
                break;
            case "CAUTION":
                executionStatus = AutomatedTestRun4.TestStatusEnum.Caution;
                break;
        }

        //Specify the start/end dates
        automatedTestRun.StartDate = startDate;
        automatedTestRun.EndDate = endDate;

        //The result log
        automatedTestRun.ExecutionStatus = executionStatus;
        automatedTestRun.RunnerMessage = externalTestSummary;
        automatedTestRun.RunnerStackTrace = externalTestDetailedResults;

        //The format of the stack trace
        //TODO: Uncomment the HTML line (and remove PlainText) if the tool emits HTML results
        //automatedTestRun.Format = AutomatedTestRun4.TestRunFormat.HTML;
        automatedTestRun.Format = AutomatedTestRun4.TestRunFormat.PlainText;

        //Populate any test steps on the test run
        automatedTestRun.TestRunSteps = new List<TestRunStep4>();
        int position = 1;

        /*
         * TODO: Use the following code in a for...next loop to add test runs for each returned test operation
         */
        //Create the test step
        TestRunStep4 testRunStep = new TestRunStep4();
        testRunStep.Description = "Description";
        testRunStep.ExpectedResult = "ExpectedResult";
        testRunStep.ActualResult = "ActualResult";
        testRunStep.SampleData = "SampleData";
        //TODO: Convert the status to the appropriate enumeration value
        testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.Passed;
        //Add the test step
        testRunStep.Position = position++;
        automatedTestRun.TestRunSteps.Add(testRunStep);

        //Populate any screenshots being added to the test run
        automatedTestRun.Screenshots = new List<TestRunScreenshot4>();

        /*
         * TODO: Use the following code in a for...next loop to add attachments for each captured screenshot
         * Replace the byte[] image = null with actual code for retrieving and populating the screenshot image
         */
        TestRunScreenshot4 screenshot = new TestRunScreenshot4();
        byte[] image = null;
        screenshot.Data = image;
        screenshot.Filename = "Screenshot.png";
        screenshot.Description = "Description of screenshot";
        automatedTestRun.Screenshots.Add(screenshot);

        //Report as complete
        base.status = EngineStatus.OK;
        return (automatedTestRun);
    }
    catch (Exception exception)
    {
        //Log the error and denote failure
        LogEvent(exception.Message + " (" + exception.StackTrace + ")", EventLogEntryType.Error);

        //Report as completed with error; rethrow with 'throw;' so the original stack trace is preserved
        base.status = EngineStatus.Error;
        throw;
    }
}
/// <summary>
/// This is the main method that is used to start automated test execution
/// </summary>
/// <param name="automatedTestRun">The automated test run object</param>
/// <param name="projectId">The id of the project</param>
/// <returns>Either the populated test run or an exception</returns>
public AutomatedTestRun4 StartExecution(AutomatedTestRun4 automatedTestRun, int projectId)
{
    //Set status to OK
    base.status = EngineStatus.OK;
    try
    {
        //Instantiate the command-line runner wrapper class
        CommandLineRunner commandLineRunner = new CommandLineRunner();
        if (TRACE_LOGGING_ENABLED)
        {
            LogEvent("Starting test execution", EventLogEntryType.Information);
        }

        //Pass the application log handle
        commandLineRunner.ApplicationLog = this.applicationLog;

        //See if we have an attached or linked test script
        //The parameters work differently for the two types
        string arguments = "";
        string path = "";
        string testName = "";
        if (automatedTestRun.Type == AutomatedTestRun4.AttachmentType.URL)
        {
            /*
             * The "URL" of the test case is the full command line including the name of the application to run
             * with the test script to execute being one of the arguments
             * If they want to provide any arguments and parameters, they need to specify them separated by a pipe (|)
             * (i.e. Execution Path|[Arguments]|[Parameter Mask])
             *
             * e.g. [ProgramFiles]\MyCommand.exe|-execute MyScript.txt -arg1 -arg2|-name:value
             *
             * would become:
             *
             * C:\Program Files\MyCommand.exe -execute MyScript.txt -arg1 -arg2 -param1:value1 -param2:value2
             *
             * If you specify [ProjectId], [TestCaseId], [TestRunId], [TestSetId] or [ReleaseId] in the list of parameters
             * they will be replaced by the appropriate ID (if a value is set)
             */

            //See if we have any pipes in the 'filename' that contains arguments or parameters
            string[] filenameElements = automatedTestRun.FilenameOrUrl.Split('|');
            testName = filenameElements[0];
            //The test name field has a 50-character limit
            if (testName.Length > 50)
            {
                testName = testName.Substring(0, 50);
            }

            //To make it easier, we have certain shortcuts that can be used in the path
            path = filenameElements[0];
            path = path.Replace("[MyDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments));
            path = path.Replace("[CommonDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.CommonDocuments));
            path = path.Replace("[DesktopDirectory]", Environment.GetFolderPath(System.Environment.SpecialFolder.DesktopDirectory));
            path = path.Replace("[ProgramFiles]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles));
            path = path.Replace("[ProgramFilesX86]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86));

            //See if we have any arguments (not parameters)
            if (filenameElements.Length > 1)
            {
                arguments = filenameElements[1];
                //Replace any special folders in the arguments as well
                arguments = arguments.Replace("[MyDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments));
                arguments = arguments.Replace("[CommonDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.CommonDocuments));
                arguments = arguments.Replace("[DesktopDirectory]", Environment.GetFolderPath(System.Environment.SpecialFolder.DesktopDirectory));
                arguments = arguments.Replace("[ProgramFiles]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles));
                arguments = arguments.Replace("[ProgramFilesX86]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86));

                //Replace the special test case and test run id tokens
                arguments = arguments.Replace("[TestCaseId]", automatedTestRun.TestCaseId.ToString());
                arguments = arguments.Replace("[TestRunId]", automatedTestRun.TestRunId.ToString());
                arguments = arguments.Replace("[ProjectId]", projectId.ToString());
                if (automatedTestRun.TestSetId.HasValue)
                {
                    arguments = arguments.Replace("[TestSetId]", automatedTestRun.TestSetId.Value.ToString());
                }
                if (automatedTestRun.ReleaseId.HasValue)
                {
                    arguments = arguments.Replace("[ReleaseId]", automatedTestRun.ReleaseId.Value.ToString());
                }
            }

            //See if we have a parameter mask
            if (filenameElements.Length > 2)
            {
                string parameterMask = filenameElements[2];

                //Now iterate through the provided parameters and add to the arguments based on the mask
                if (automatedTestRun.Parameters != null)
                {
                    foreach (TestRunParameter parameter in automatedTestRun.Parameters)
                    {
                        string parameterArgument = parameterMask.Replace("name", parameter.Name);
                        parameterArgument = parameterArgument.Replace("value", parameter.Value);
                        arguments += " " + parameterArgument;
                    }
                }
            }
        }
        else
        {
            //We have an embedded script which we need to save onto the file system so that it can be executed
            //by the command-line tool
            /*
             * The filename of the test case is the full path of the application to run
             * with the test script to execute being one of the arguments, specified as (unknown)
             * If they want to provide any arguments, they need to specify them separated by a pipe (|)
             * (i.e. Execution Path|[Arguments])
             *
             * e.g. [ProgramFiles]\MyCommand.exe|-execute "(unknown)" -arg1 -arg2
             *
             * would become:
             *
             * C:\Program Files\MyCommand.exe -execute "C:\Documents And Settings\Username\Local Settings\Application Data\MyScript.txt" -arg1 -arg2
             *
             * In this mode, the parameters are used to replace tokens in the actual test script rather than
             * being passed to the command-line handler
             *
             * If you specify [ProjectId], [TestCaseId], [TestRunId], [TestSetId] or [ReleaseId] in the list of parameters
             * they will be replaced by the appropriate ID (if a value is set)
             */

            //We store the script in a temp file in a remote launch folder
            string outputFolder = System.Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData) + @"\RemoteLaunch";
            if (!Directory.Exists(outputFolder))
            {
                Directory.CreateDirectory(outputFolder);
            }

            //First we need to get the test script
            if (automatedTestRun.TestScript == null || automatedTestRun.TestScript.Length == 0)
            {
                throw new ApplicationException("The provided test script is empty, aborting test execution");
            }
            string testScript = Encoding.UTF8.GetString(automatedTestRun.TestScript);

            //Replace any parameters (in ${parametername} lower-case syntax)
            if (automatedTestRun.Parameters != null)
            {
                foreach (TestRunParameter parameter in automatedTestRun.Parameters)
                {
                    testScript = testScript.Replace(CreateParameterToken(parameter.Name), parameter.Value);
                }
            }

            //Replace the special test case and test run id tokens
            testScript = testScript.Replace("[TestCaseId]", automatedTestRun.TestCaseId.ToString());
            testScript = testScript.Replace("[TestRunId]", automatedTestRun.TestRunId.ToString());
            testScript = testScript.Replace("[ProjectId]", projectId.ToString());
            if (automatedTestRun.TestSetId.HasValue)
            {
                testScript = testScript.Replace("[TestSetId]", automatedTestRun.TestSetId.Value.ToString());
            }
            if (automatedTestRun.ReleaseId.HasValue)
            {
                testScript = testScript.Replace("[ReleaseId]", automatedTestRun.ReleaseId.Value.ToString());
            }

            //Now we need to put the test script into this folder
            //Use a 'using' block so the file handle is released even if the write fails
            string testScriptPath = outputFolder + @"\CommandLineEngine.txt";
            using (StreamWriter streamWriter = new StreamWriter(testScriptPath))
            {
                streamWriter.Write(testScript);
                streamWriter.Flush();
            }

            //See if we have any pipes in the 'filename' that contains arguments or parameters
            string[] filenameElements = automatedTestRun.FilenameOrUrl.Split('|');
            testName = filenameElements[0];
            if (testName.Length > 50)
            {
                testName = testName.Substring(0, 50);
            }

            //To make it easier, we have certain shortcuts that can be used in the path
            path = filenameElements[0];
            path = path.Replace("[MyDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments));
            path = path.Replace("[CommonDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.CommonDocuments));
            path = path.Replace("[DesktopDirectory]", Environment.GetFolderPath(System.Environment.SpecialFolder.DesktopDirectory));
            path = path.Replace("[ProgramFiles]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles));
            path = path.Replace("[ProgramFilesX86]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86));

            //See if we have any arguments
            if (filenameElements.Length > 1)
            {
                arguments = filenameElements[1];
                //Replace any special folders in the arguments as well
                arguments = arguments.Replace("[MyDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments));
                arguments = arguments.Replace("[CommonDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.CommonDocuments));
                arguments = arguments.Replace("[DesktopDirectory]", Environment.GetFolderPath(System.Environment.SpecialFolder.DesktopDirectory));
                arguments = arguments.Replace("[ProgramFiles]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles));
                arguments = arguments.Replace("[ProgramFilesX86]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86));

                //Point the placeholder token(s) at the temp file we just wrote
                arguments = arguments.Replace("(unknown)", testScriptPath);
                arguments = arguments.Replace("[filename]", testScriptPath);

                //Replace the special test case and test run id tokens
                arguments = arguments.Replace("[TestCaseId]", automatedTestRun.TestCaseId.ToString());
                arguments = arguments.Replace("[TestRunId]", automatedTestRun.TestRunId.ToString());
                arguments = arguments.Replace("[ProjectId]", projectId.ToString());
                if (automatedTestRun.TestSetId.HasValue)
                {
                    arguments = arguments.Replace("[TestSetId]", automatedTestRun.TestSetId.Value.ToString());
                }
                if (automatedTestRun.ReleaseId.HasValue)
                {
                    arguments = arguments.Replace("[ReleaseId]", automatedTestRun.ReleaseId.Value.ToString());
                }
            }
        }

        //Actually run the command-line test
        DateTime startDate = DateTime.Now;
        string results = commandLineRunner.Execute(path, arguments, Properties.Settings.Default.LogResults);
        DateTime endDate = DateTime.Now;

        //See if we want to log the results or not
        //This is useful when launching a process that knows how to send results back to SpiraTest itself
        //(e.g. a NUnit test suite)
        if (Properties.Settings.Default.LogResults)
        {
            //Now extract the test results and populate the test run object
            if (String.IsNullOrEmpty(automatedTestRun.RunnerName))
            {
                automatedTestRun.RunnerName = this.ExtensionName;
            }

            //Specify the start/end dates
            automatedTestRun.StartDate = startDate;
            automatedTestRun.EndDate = endDate;

            //Put the filename as the 'test name'
            automatedTestRun.RunnerTestName = testName;

            //We use the Regexes to determine the status
            automatedTestRun.ExecutionStatus = (AutomatedTestRun4.TestStatusEnum)Properties.Settings.Default.DefaultStatus;
            Regex passRegex = new Regex(Properties.Settings.Default.PassRegex, RegexOptions.IgnoreCase);
            Regex failRegex = new Regex(Properties.Settings.Default.FailRegex, RegexOptions.IgnoreCase);
            Regex cautionRegex = new Regex(Properties.Settings.Default.CautionRegex, RegexOptions.IgnoreCase);
            Regex blockedRegex = new Regex(Properties.Settings.Default.BlockedRegex, RegexOptions.IgnoreCase);

            //Check the statuses in increasing order of precedence
            //(Blocked overrides Failed overrides Caution overrides Passed)
            if (passRegex.IsMatch(results))
            {
                automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.Passed;
            }
            if (cautionRegex.IsMatch(results))
            {
                automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.Caution;
            }
            if (failRegex.IsMatch(results))
            {
                automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.Failed;
            }
            if (blockedRegex.IsMatch(results))
            {
                automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.Blocked;
            }

            //The runner message field has a 50-character limit; full output goes into the stack trace
            if (results.Length > 50)
            {
                automatedTestRun.RunnerMessage = results.Substring(0, 50);
            }
            else
            {
                automatedTestRun.RunnerMessage = results;
            }
            automatedTestRun.Format = AutomatedTestRun4.TestRunFormat.PlainText;
            automatedTestRun.RunnerStackTrace = results;
            automatedTestRun.RunnerAssertCount = (automatedTestRun.ExecutionStatus == AutomatedTestRun4.TestStatusEnum.Passed) ? 0 : 1;
        }
        else
        {
            automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.NotRun;
        }

        //Report as complete
        base.status = EngineStatus.OK;
        return (automatedTestRun);
    }
    catch (Exception exception)
    {
        //Log the error and denote failure
        LogEvent(exception.Message + " (" + exception.StackTrace + ")", EventLogEntryType.Error);

        //Report as completed with error; rethrow with 'throw;' so the original stack trace is preserved
        base.status = EngineStatus.Error;
        throw;
    }
}
/// <summary>
/// This is the main method that is used to start automated test execution
/// </summary>
/// <param name="automatedTestRun">The automated test run object</param>
/// <param name="projectId">The id of the project</param>
/// <returns>Either the populated test run or an exception</returns>
public AutomatedTestRun4 StartExecution(AutomatedTestRun4 automatedTestRun, int projectId)
{
    //Set status to OK
    base.status = EngineStatus.OK;
    string externalTestDetailedResults = "";
    string sCompletePath = "";
    try
    {
        if (Properties.Settings.Default.TraceLogging)
        {
            LogEvent("Starting test execution", EventLogEntryType.Information);
        }
        DateTime startDate = DateTime.Now;

        //See if we have any parameters we need to pass to the automation engine
        Dictionary<string, string> parameters = new Dictionary<string, string>();
        if (automatedTestRun.Parameters == null)
        {
            if (Properties.Settings.Default.TraceLogging)
            {
                LogEvent("Test Run has no parameters", EventLogEntryType.Information);
            }
        }
        else
        {
            if (Properties.Settings.Default.TraceLogging)
            {
                LogEvent("Test Run has parameters", EventLogEntryType.Information);
            }
            foreach (TestRunParameter testRunParameter in automatedTestRun.Parameters)
            {
                //Trim whitespace from the parameter name before using it as a key
                string parameterName = testRunParameter.Name.Trim();
                if (!parameters.ContainsKey(parameterName))
                {
                    if (Properties.Settings.Default.TraceLogging)
                    {
                        LogEvent("Adding test run parameter " + parameterName + " = " + testRunParameter.Value, EventLogEntryType.Information);
                    }
                    parameters.Add(parameterName, testRunParameter.Value);
                }
            }
        }

        string runnerTestName = "Unknown";

        //See if we have an attached or linked test script
        if (automatedTestRun.Type == AutomatedTestRun4.AttachmentType.URL)
        {
            //The "URL" of the test is actually the full file path of the file that contains the test script
            //Some automation engines need additional parameters which can be provided by allowing the test script filename
            //to consist of multiple elements separated by a specific character.
            //Conventionally, most engines use the pipe (|) character to delimit the different elements
            string[] elements = automatedTestRun.FilenameOrUrl.Split('|');

            //To make it easier, we have certain shortcuts that can be used in the path
            //This allows the same test to be run on different machines with different physical folder layouts
            string path = elements[0];
            runnerTestName = Path.GetFileNameWithoutExtension(path);
            string args = "";
            if (elements.Length > 1)
            {
                args = elements[1];
            }
            path = path.Replace("[MyDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments));
            path = path.Replace("[CommonDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.CommonDocuments));
            path = path.Replace("[DesktopDirectory]", Environment.GetFolderPath(System.Environment.SpecialFolder.DesktopDirectory));
            path = path.Replace("[ProgramFiles]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles));
            path = path.Replace("[ProgramFilesX86]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86));

            //First make sure that the file exists
            if (File.Exists(path))
            {
                if (Properties.Settings.Default.TraceLogging)
                {
                    LogEvent("Executing " + Constants.EXTERNAL_SYSTEM_NAME + " test located at " + path, EventLogEntryType.Information);
                }

                //Get the working dir
                string workingDir = Path.GetDirectoryName(path);

                //Create the folder that will be used to store the output file
                string sResultFile = "TS" + automatedTestRun.TestSetId + "_TC" + automatedTestRun.TestCaseId;
                string directory = Properties.Settings.Default.ResultPath + "\\" + GetTimestamp(DateTime.Now) + "_" + sResultFile + "\\";
                sCompletePath = directory + sResultFile + ".rxlog";
                CreateDirectory(new DirectoryInfo(directory));

                //Start the process and wait for exit
                ProcessStartInfo startInfo = new ProcessStartInfo();

                //Convert the parameters into Ranorex format
                // /param:Parameter1="New Value"
                StringBuilder builder = new StringBuilder("/rf:\"" + sCompletePath + "\"");
                foreach (KeyValuePair<string, string> parameter in parameters)
                {
                    builder.Append(" /param:" + parameter.Key + "=\"" + parameter.Value + "\"");
                }

                //Add on any user-specified arguments
                if (!String.IsNullOrWhiteSpace(args))
                {
                    builder.Append(" " + args);
                }
                startInfo.Arguments = builder.ToString();
                startInfo.FileName = path;
                startInfo.WorkingDirectory = workingDir;
                startInfo.RedirectStandardOutput = true;
                startInfo.UseShellExecute = false;

                //Use a 'using' block so the process handle is released even if reading the output throws
                using (Process p = Process.Start(startInfo))
                {
                    if (p == null)
                    {
                        throw new ApplicationException("Unable to start the " + Constants.EXTERNAL_SYSTEM_NAME + " test process");
                    }
                    externalTestDetailedResults = String.Format("Executing: {0} in '{1}' with arguments '{2}'\n", startInfo.FileName, startInfo.WorkingDirectory, startInfo.Arguments);
                    //Read stdout to the end before waiting, to avoid a deadlock on a full output buffer
                    externalTestDetailedResults += p.StandardOutput.ReadToEnd();
                    p.WaitForExit();
                }
            }
            else
            {
                throw new FileNotFoundException("Unable to find a " + Constants.EXTERNAL_SYSTEM_NAME + " test at " + path);
            }
        }
        else
        {
            //We have an embedded script which we need to send to the test execution engine
            //This engine doesn't support embedded/attached scripts
            throw new InvalidOperationException("The " + Constants.EXTERNAL_SYSTEM_NAME + " automation engine only supports linked test scripts");
        }

        //Capture the time that it took to run the test
        DateTime endDate = DateTime.Now;

        //Now extract the test results
        //Ranorex saves the XML data in a .rxlog.data file
        XmlDocument doc = new XmlDocument();
        doc.Load(sCompletePath + ".data");

        //The root activity node carries the overall test result
        XmlNode result = doc.DocumentElement.SelectSingleNode("/report/activity");
        if (result == null)
        {
            throw new ApplicationException("Unable to locate the /report/activity node in the " + Constants.EXTERNAL_SYSTEM_NAME + " result file");
        }
        string externalTestStatus = result.Attributes["result"].Value;

        //Summarize any error messages; fall back to the raw status when there are none
        string externalTestSummary = "";
        XmlNodeList errorMessages = doc.DocumentElement.SelectNodes(".//errmsg");
        if (errorMessages.Count > 0)
        {
            externalTestSummary = "";
            foreach (XmlNode error in errorMessages)
            {
                externalTestSummary = externalTestSummary + error.InnerText + "\n";
            }
        }
        else
        {
            externalTestSummary = externalTestStatus;
        }

        //Populate the Test Run object with the results
        if (String.IsNullOrEmpty(automatedTestRun.RunnerName))
        {
            automatedTestRun.RunnerName = this.ExtensionName;
        }
        automatedTestRun.RunnerTestName = Path.GetFileNameWithoutExtension(runnerTestName);

        //Convert the status for use in SpiraTest
        AutomatedTestRun4.TestStatusEnum executionStatus = AutomatedTestRun4.TestStatusEnum.Passed;
        switch (externalTestStatus)
        {
            case "Success":
                executionStatus = AutomatedTestRun4.TestStatusEnum.Passed;
                break;
            case "Failed":
            case "Error":
                executionStatus = AutomatedTestRun4.TestStatusEnum.Failed;
                break;
            case "Warn":
                executionStatus = AutomatedTestRun4.TestStatusEnum.Caution;
                break;
            default:
                executionStatus = AutomatedTestRun4.TestStatusEnum.Blocked;
                break;
        }

        //Specify the start/end dates
        automatedTestRun.StartDate = startDate;
        automatedTestRun.EndDate = endDate;

        //The result log
        automatedTestRun.ExecutionStatus = executionStatus;
        automatedTestRun.RunnerMessage = externalTestSummary;
        automatedTestRun.RunnerStackTrace = externalTestDetailedResults;
        automatedTestRun.Format = AutomatedTestRun4.TestRunFormat.PlainText;

        //Now get the detailed activity as 'test steps'
        //Also override the status if we find any failures or warnings
        XmlNodeList resultItems = doc.SelectNodes("/report/activity[@type='root']//activity/item");
        if (resultItems != null && resultItems.Count > 0)
        {
            automatedTestRun.TestRunSteps = new List<TestRunStep4>();
            int position = 1;
            foreach (XmlNode xmlItem in resultItems)
            {
                string category = xmlItem.Attributes["category"].Value;
                string level = xmlItem.Attributes["level"].Value;
                string message = "";
                XmlNode xmlMessage = xmlItem.SelectSingleNode("message");
                if (xmlMessage != null)
                {
                    message = xmlMessage.InnerText;
                }

                //Create the test step
                TestRunStep4 testRunStep = new TestRunStep4();
                testRunStep.Description = category;
                testRunStep.ExpectedResult = "";
                testRunStep.ActualResult = message;
                testRunStep.SampleData = "";

                //Convert the status to the appropriate enumeration value
                testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.NotRun;
                switch (level)
                {
                    case "Success":
                        testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.Passed;
                        break;
                    case "Info":
                        testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.NotApplicable;
                        break;
                    case "Warn":
                        testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.Caution;
                        //A warning only downgrades a run that hasn't already failed or been blocked
                        if (automatedTestRun.ExecutionStatus == AutomatedTestRun4.TestStatusEnum.Passed ||
                            automatedTestRun.ExecutionStatus == AutomatedTestRun4.TestStatusEnum.NotRun ||
                            automatedTestRun.ExecutionStatus == AutomatedTestRun4.TestStatusEnum.NotApplicable)
                        {
                            automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.Caution;
                        }
                        break;
                    case "Failure":
                    case "Error":
                        testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.Failed;
                        //Any failure makes the whole run failed
                        if (automatedTestRun.ExecutionStatus != AutomatedTestRun4.TestStatusEnum.Failed)
                        {
                            automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.Failed;
                        }
                        break;
                }

                //Add the test step
                testRunStep.Position = position++;
                automatedTestRun.TestRunSteps.Add(testRunStep);
            }
        }

        //Report as complete
        base.status = EngineStatus.OK;
        return (automatedTestRun);
    }
    catch (Exception exception)
    {
        //Log the error and denote failure
        LogEvent(exception.Message + " (" + exception.StackTrace + ")", EventLogEntryType.Error);

        //Report as completed with error; rethrow with 'throw;' so the original stack trace is preserved
        base.status = EngineStatus.Error;
        throw;
    }
}
/// <summary>
/// This is the main method that is used to start automated test execution
/// </summary>
/// <param name="automatedTestRun">The automated test run object</param>
/// <param name="projectId">The id of the project</param>
/// <returns>Either the populated test run or an exception</returns>
public AutomatedTestRun4 StartExecution(AutomatedTestRun4 automatedTestRun, int projectId)
{
    //Set status to OK
    base.status = EngineStatus.OK;
    try
    {
        if (Properties.Settings.Default.TraceLogging && applicationLog != null)
        {
            applicationLog.WriteEntry("SoapUIEngine.StartExecution: Entering", EventLogEntryType.Information);
        }
        if (automatedTestRun == null)
        {
            throw new InvalidOperationException("The automatedTestRun object provided was null");
        }

        //Instantiate the SOAP-UI runner wrapper class
        TestRunner soapUiRunner = new TestRunner(Properties.Settings.Default.Location);

        //Set the license type (pro vs. free)
        soapUiRunner.SupportsDataExport = Properties.Settings.Default.ProLicense;

        //Specify if this is a load test or not
        soapUiRunner.IsLoadTest = Properties.Settings.Default.LoadTest;

        if (Properties.Settings.Default.TraceLogging && applicationLog != null)
        {
            LogEvent("Starting test execution", EventLogEntryType.Information);
        }
        soapUiRunner.TraceLogging = Properties.Settings.Default.TraceLogging;

        //Pass the application log handle
        soapUiRunner.ApplicationLog = this.applicationLog;

        //See if we have an attached or linked test script
        //Only linked test cases are supported by this engine
        if (automatedTestRun.Type == AutomatedTestRun4.AttachmentType.URL)
        {
            //The "URL" of the test is a combination of project filename, project suite name and test case name:
            //Project File Name|Test Suite Name|Test Case Name
            //e.g. [MyDocuments]\SpiraTest-3-0-Web-Service-soapui-project.xml|Requirements Testing|Get Requirements
            //See if we have any pipes in the 'filename' that include additional options
            string[] filenameElements = automatedTestRun.FilenameOrUrl.Split('|');

            //Make sure we have all three elements (the fourth is optional)
            if (filenameElements.Length < 3)
            {
                throw new ArgumentException(String.Format("You need to provide a project file, test suite and test case name separated by pipe (|) characters. Only {0} elements were provided.", filenameElements.Length));
            }

            //To make it easier, we have certain shortcuts that can be used in the path
            string path = filenameElements[0];
            path = path.Replace("[MyDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments));
            path = path.Replace("[CommonDocuments]", Environment.GetFolderPath(System.Environment.SpecialFolder.CommonDocuments));
            path = path.Replace("[DesktopDirectory]", Environment.GetFolderPath(System.Environment.SpecialFolder.DesktopDirectory));
            path = path.Replace("[ProgramFiles]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles));
            path = path.Replace("[ProgramFilesX86]", Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86));

            //First make sure that the File exists, or folder if it's a composite project
            if (File.Exists(path) || Directory.Exists(path))
            {
                //Set the path, test suite and test case on the runner
                soapUiRunner.ProjectPath = path;
                soapUiRunner.TestSuite = filenameElements[1];
                soapUiRunner.TestCase = filenameElements[2];
                if (filenameElements.Length > 3)
                {
                    soapUiRunner.OtherCommandLineSwitches = filenameElements[3];
                }
            }
            else
            {
                throw new ArgumentException(String.Format("The provided project filepath '{0}' does not exist on the host!", path));
            }
        }
        else
        {
            //We have an embedded script which we need to execute directly
            //This is not currently supported since SOAP-UI uses XML files which cannot be easily edited in SpiraTest
            throw new InvalidOperationException("The SOAP-UI automation engine only supports linked test scripts");
        }

        //Actually run the test
        DateTime startDate = DateTime.Now;
        TestRunnerOutput output;

        //See if we have any parameters we need to pass
        if (automatedTestRun.Parameters == null)
        {
            if (Properties.Settings.Default.TraceLogging && applicationLog != null)
            {
                LogEvent("Test Run has no parameters", EventLogEntryType.Information);
            }

            //Run the test
            output = soapUiRunner.Execute();
        }
        else
        {
            if (Properties.Settings.Default.TraceLogging && applicationLog != null)
            {
                LogEvent("Test Run has parameters", EventLogEntryType.Information);
            }
            Dictionary<string, string> parameters = new Dictionary<string, string>();
            foreach (TestRunParameter testRunParameter in automatedTestRun.Parameters)
            {
                //Make sure the parameters are lower case for comparing later
                string parameterName = testRunParameter.Name.ToLowerInvariant();
                if (!parameters.ContainsKey(parameterName))
                {
                    if (Properties.Settings.Default.TraceLogging && applicationLog != null)
                    {
                        LogEvent("Adding test run parameter " + parameterName + " = " + testRunParameter.Value, EventLogEntryType.Information);
                    }
                    parameters.Add(parameterName, testRunParameter.Value);
                }
            }

            //Run the test with parameters
            output = soapUiRunner.Execute(parameters);
        }
        DateTime endDate = DateTime.Now;

        //Specify the start/end dates
        automatedTestRun.StartDate = startDate;
        automatedTestRun.EndDate = endDate;
        automatedTestRun.RunnerTestName = soapUiRunner.TestSuite + " / " + soapUiRunner.TestCase;

        //Need to parse the results. The console output looks like:
        //SoapUI 3.6.1 TestCaseRunner Summary
        //-----------------------------
        //Time Taken: 837ms
        //Total TestSuites: 0
        //Total TestCases: 1 (0 failed)
        //Total TestSteps: 3
        //Total Request Assertions: 6
        //Total Failed Assertions: 0
        //Total Exported Results: 3

        //Parse the summary result log
        string[] lines = output.ConsoleOutput.Split('\n');
        int stepsCount = 0;
        int assertions = 0;
        int failedAssertions = 0;
        int failedTestCases = 0;

        //Hoisted out of the loop (loop-invariant); the failed count uses \d+ so
        //runs with 10 or more failed test cases are not silently reported as 0 failures
        Regex testCasesRegex = new Regex(@"^Total TestCases: (\d+) \((\d+) failed\)");
        foreach (string line in lines)
        {
            //Extract the various counts
            if (line.StartsWith("Total TestSteps:"))
            {
                string value = line.Substring("Total TestSteps:".Length).Trim();
                int intValue;
                if (Int32.TryParse(value, out intValue))
                {
                    stepsCount = intValue;
                }
            }
            if (line.StartsWith("Total Request Assertions:"))
            {
                string value = line.Substring("Total Request Assertions:".Length).Trim();
                int intValue;
                if (Int32.TryParse(value, out intValue))
                {
                    assertions = intValue;
                }
            }
            if (line.StartsWith("Total Failed Assertions:"))
            {
                string value = line.Substring("Total Failed Assertions:".Length).Trim();
                int intValue;
                if (Int32.TryParse(value, out intValue))
                {
                    failedAssertions = intValue;
                }
            }

            //Use Regex to parse the number of failed test cases
            //(single Match call; Match.Success replaces the redundant IsMatch + null check)
            Match match = testCasesRegex.Match(line);
            if (match.Success && match.Groups.Count >= 3)
            {
                int intValue;
                if (Int32.TryParse(match.Groups[2].Value, out intValue))
                {
                    failedTestCases = intValue;
                }
            }
        }
        automatedTestRun.ExecutionStatus = (failedAssertions > 0 || failedTestCases > 0) ? AutomatedTestRun4.TestStatusEnum.Failed : AutomatedTestRun4.TestStatusEnum.Passed;
        automatedTestRun.RunnerMessage = String.Format("{0} test steps completed with {1} request assertions, {2} failed assertions and {3} failed test cases.", stepsCount, assertions, failedAssertions, failedTestCases);
        automatedTestRun.RunnerStackTrace = output.ConsoleOutput;
        automatedTestRun.RunnerAssertCount = failedAssertions;

        //If we have an instance of SOAP-UI Pro, there are more detailed XML reports available
        //
        //SOAP-UI TEST
        //<TestCaseTestStepResults>
        //  <result>
        //    <message>Step 0 [Authenticate] OK: took 617 ms</message>
        //    <name>Authenticate</name>
        //    <order>1</order>
        //    <started>23:03:30.871</started>
        //    <status>OK</status>
        //    <timeTaken>617</timeTaken>
        //  </result>
        //</TestCaseTestStepResults>
        //
        //LOAD-UI TEST
        //<LoadTestLog>
        //  <entry>
        //    <discarded>false</discarded>
        //    <error>true</error>
        //    <message>...failure details (CDATA)...</message>
        //    <targetStepName>tc13_listAccountBalanceHistoryV1</targetStepName>
        //    <timeStamp>1403573495573</timeStamp>
        //    <type>Step Status</type>
        //  </entry>
        //</LoadTestLog>
        if (Properties.Settings.Default.ProLicense)
        {
            if (output.XmlOutput != null)
            {
                //Sometimes failure will be in the steps even when the summary reports success
                bool errorFound = false;
                automatedTestRun.TestRunSteps = new List<TestRunStep4>();
                int position = 1;

                //See if we have a load test or not
                if (Properties.Settings.Default.LoadTest)
                {
                    XmlNodeList xmlNodes = output.XmlOutput.SelectNodes("LoadTestLog/entry");
                    foreach (XmlNode xmlNode in xmlNodes)
                    {
                        //Add the message to the stack-trace
                        string message = xmlNode.SelectSingleNode("message").InnerText;
                        string type = xmlNode.SelectSingleNode("type").InnerText;
                        string targetStepName = "";
                        if (xmlNode.SelectSingleNode("targetStepName") != null)
                        {
                            targetStepName = xmlNode.SelectSingleNode("targetStepName").InnerText;
                        }
                        string error = xmlNode.SelectSingleNode("error").InnerText;

                        //Add the 'test step'
                        TestRunStep4 testRunStep = new TestRunStep4();
                        testRunStep.ExecutionStatusId = (error == "true") ? (int)AutomatedTestRun4.TestStatusEnum.Failed : (int)AutomatedTestRun4.TestStatusEnum.Passed;
                        testRunStep.Description = type + ": " + targetStepName;
                        testRunStep.ActualResult = message;
                        testRunStep.Position = position++;
                        automatedTestRun.TestRunSteps.Add(testRunStep);
                        if (error == "true")
                        {
                            errorFound = true;
                        }
                    }
                }
                else
                {
                    XmlNodeList xmlNodes = output.XmlOutput.SelectNodes("TestCaseTestStepResults/result");
                    foreach (XmlNode xmlNode in xmlNodes)
                    {
                        //Get the message
                        string message = xmlNode.SelectSingleNode("message").InnerText;

                        //Add the 'test step'
                        string status = xmlNode.SelectSingleNode("status").InnerText;
                        TestRunStep4 testRunStep = new TestRunStep4();
                        switch (status)
                        {
                            case "OK":
                            case "PASS":
                                testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.Passed;
                                break;

                            case "FAIL":
                                testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.Failed;
                                break;

                            case "UNKNOWN":
                                testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.NotApplicable;
                                break;

                            default:
                                testRunStep.ExecutionStatusId = (int)AutomatedTestRun4.TestStatusEnum.NotRun;
                                errorFound = true;
                                break;
                        }
                        testRunStep.Description = message;
                        testRunStep.ActualResult = status;
                        testRunStep.Position = position++;
                        automatedTestRun.TestRunSteps.Add(testRunStep);
                    }
                }

                //Override the overall status if a step-level failure was found
                if (errorFound && (automatedTestRun.ExecutionStatus == AutomatedTestRun4.TestStatusEnum.Passed || automatedTestRun.ExecutionStatus == AutomatedTestRun4.TestStatusEnum.NotRun || automatedTestRun.ExecutionStatus == AutomatedTestRun4.TestStatusEnum.Caution))
                {
                    automatedTestRun.ExecutionStatus = AutomatedTestRun4.TestStatusEnum.Failed;
                }
            }
            else
            {
                if (applicationLog != null)
                {
                    LogEvent("Unable to access the SOAP-UI Pro Detailed XML Log File", EventLogEntryType.Error);
                }
            }
        }
        if (Properties.Settings.Default.TraceLogging && applicationLog != null)
        {
            //Fixed copy-paste bug: this trace fires on the way out, not on entry
            applicationLog.WriteEntry("SoapUIEngine.StartExecution: Exiting", EventLogEntryType.Information);
        }

        //Report as complete
        base.status = EngineStatus.OK;
        return automatedTestRun;
    }
    catch (Exception exception)
    {
        //Log the error and denote failure
        LogEvent(exception.Message + " (" + exception.StackTrace + ")", EventLogEntryType.Error);

        //Report as completed with error
        base.status = EngineStatus.Error;

        //Rethrow with 'throw;' so the original stack trace is preserved
        //(the old 'throw exception;' reset it to this frame)
        throw;
    }
}