/// <summary>
/// Creates an instance of the runner for the given test sources.
/// </summary>
/// <param name="sources"></param>
/// <param name="timeout"></param>
/// <param name="ControllerPollingInterval"></param>
/// <param name="perScenarioTimeOut"></param>
/// <param name="ignoreErrorStrings"></param>
/// <param name="jenkinsEnvVariables"></param>
/// <param name="fsAppParamName"></param>
/// <param name="appIdentifier"></param>
/// <param name="useUFTLicense"></param>
public FileSystemTestsRunner(List<string> sources,
                             TimeSpan timeout,
                             int ControllerPollingInterval,
                             TimeSpan perScenarioTimeOut,
                             List<string> ignoreErrorStrings,
                             Dictionary<string, string> jenkinsEnvVariables,
                             string fsAppParamName,
                             string appIdentifier,
                             bool useUFTLicense = false)
{
    _jenkinsEnvVariables = jenkinsEnvVariables;

    //search if we have any testing tools installed
    if (!Helper.IsTestingToolsInstalled(TestStorageType.FileSystem))
    {
        ConsoleWriter.WriteErrLine(string.Format(Resources.FileSystemTestsRunner_No_HP_testing_tool_is_installed_on, System.Environment.MachineName));
        Environment.Exit((int)Launcher.ExitCodeEnum.Failed);
    }

    _timeout = timeout;
    _stopwatch = Stopwatch.StartNew();

    _pollingInterval = ControllerPollingInterval;
    _perScenarioTimeOut = perScenarioTimeOut;
    _ignoreErrorStrings = ignoreErrorStrings;

    _useUFTLicense = useUFTLicense;
    _tests = new List<TestInfo>();

    //go over all sources, and create a list of all tests
    foreach (string source in sources)
    {
        List<TestInfo> testGroup = new List<TestInfo>();
        try
        {
            //--handle directories which contain test subdirectories (recursively)
            if (Helper.IsDirectory(source))
            {
                var testsLocations = Helper.GetTestsLocations(source);
                foreach (var loc in testsLocations)
                {
                    var test = new TestInfo(loc, loc, source);
                    testGroup.Add(test);
                }
            }
            //--handle files: a LoadRunner scenario, an .mtb file (which contains links to tests)
            //  or an .mtbx file; other files are dropped
            else
            {
                testGroup = new List<TestInfo>();
                FileInfo fi = new FileInfo(source);
                if (fi.Extension == Helper.LoadRunnerFileExtention)
                {
                    testGroup.Add(new TestInfo(source, source, source));
                }
                else if (fi.Extension == ".mtb")
                {
                    MtbManager manager = new MtbManager();
                    var paths = manager.Parse(source);
                    foreach (var p in paths)
                    {
                        testGroup.Add(new TestInfo(p, p, source));
                    }
                }
                else if (fi.Extension == ".mtbx")
                {
                    testGroup = MtbxManager.Parse(source, _jenkinsEnvVariables, source);
                    if (!string.IsNullOrEmpty(fsAppParamName) && !string.IsNullOrEmpty(appIdentifier))
                    {
                        var testParam = new TestParameterInfo() { Name = fsAppParamName, Type = "string", Value = appIdentifier };
                        foreach (TestInfo testInfo in testGroup)
                        {
                            testInfo.ParameterList.Add(testParam);
                        }
                    }
                }
            }
        }
        catch (Exception)
        {
            testGroup = new List<TestInfo>();
        }

        //--a single test directory is added with no group
        if (testGroup.Count == 1)
        {
            testGroup[0].TestGroup = "<None>";
        }

        _tests.AddRange(testGroup);
    }

    if (_tests == null || _tests.Count == 0)
    {
        ConsoleWriter.WriteLine(Resources.FsRunnerNoValidTests);
        Environment.Exit((int)Launcher.ExitCodeEnum.Failed);
    }

    ConsoleWriter.WriteLine(string.Format(Resources.FsRunnerTestsFound, _tests.Count));
    _tests.ForEach(t => ConsoleWriter.WriteLine("" + t.TestName));
    ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator);
}
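// Illustrative usage sketch only (not part of the original launcher): shows one way the
// constructor above could be driven. All paths and values are hypothetical; the sources
// list may mix test folders, LoadRunner scenarios, .mtb and .mtbx files - anything else
// is silently dropped by the constructor.
private static void FileSystemRunnerUsageSketch()
{
    var runner = new FileSystemTestsRunner(
        sources: new List<string> { @"C:\Tests\GuiSuite", @"C:\Tests\regression.mtbx" }, // hypothetical paths
        timeout: TimeSpan.FromHours(2),
        ControllerPollingInterval: 30,
        perScenarioTimeOut: TimeSpan.FromMinutes(10),
        ignoreErrorStrings: new List<string>(),
        jenkinsEnvVariables: new Dictionary<string, string>(),
        fsAppParamName: null,
        appIdentifier: null);

    TestSuiteRunResults results = runner.Run();
    ConsoleWriter.WriteLine("Tests executed: " + results.NumTests);
}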
/// <summary>
/// Creates the correct runner according to the given type.
/// </summary>
/// <param name="runType"></param>
/// <param name="ciParams"></param>
/// <param name="initialTestRun"></param>
IAssetRunner CreateRunner(TestStorageType runType, JavaProperties ciParams, bool initialTestRun)
{
    IAssetRunner runner = null;
    switch (runType)
    {
        case TestStorageType.Alm:
            //check that all required parameters exist
            foreach (string param1 in requiredParamsForQcRun)
            {
                if (!_ciParams.ContainsKey(param1))
                {
                    ConsoleWriter.WriteLine(string.Format(Resources.LauncherParamRequired, param1));
                    return null;
                }
            }

            //parse params that need parsing
            double dblQcTimeout = int.MaxValue;
            if (!double.TryParse(_ciParams["almTimeout"], out dblQcTimeout))
            {
                ConsoleWriter.WriteLine(Resources.LauncherTimeoutNotNumeric);
                dblQcTimeout = int.MaxValue;
            }
            ConsoleWriter.WriteLine(string.Format(Resources.LuancherDisplayTimout, dblQcTimeout));

            QcRunMode enmQcRunMode = QcRunMode.RUN_LOCAL;
            if (!Enum.TryParse<QcRunMode>(_ciParams["almRunMode"], true, out enmQcRunMode))
            {
                ConsoleWriter.WriteLine(Resources.LauncherIncorrectRunmode);
                enmQcRunMode = QcRunMode.RUN_LOCAL;
            }
            ConsoleWriter.WriteLine(string.Format(Resources.LauncherDisplayRunmode, enmQcRunMode.ToString()));

            //go over the test sets in the parameters, and collect them
            List<string> sets = GetParamsWithPrefix("TestSet");
            if (sets.Count == 0)
            {
                ConsoleWriter.WriteLine(Resources.LauncherNoTests);
                return null;
            }

            //check whether the FilterTests flag is selected; if so, apply the filters on the list
            bool isFilterSelected;
            string filter = (_ciParams.ContainsKey("FilterTests") ? _ciParams["FilterTests"] : "");
            if (string.IsNullOrEmpty(filter))
            {
                isFilterSelected = false;
            }
            else
            {
                isFilterSelected = Convert.ToBoolean(filter.ToLower());
            }

            string filterByName = (_ciParams.ContainsKey("FilterByName") ? _ciParams["FilterByName"] : "");
            string statuses = (_ciParams.ContainsKey("FilterByStatus") ? _ciParams["FilterByStatus"] : "");

            List<string> filterByStatuses = new List<string>();
            if (statuses != "")
            {
                if (statuses.Contains(","))
                {
                    filterByStatuses = statuses.Split(',').ToList();
                }
                else
                {
                    filterByStatuses.Add(statuses);
                }
            }

            //create an ALM runner
            runner = new AlmTestSetsRunner(_ciParams["almServerUrl"], _ciParams["almUserName"],
                                           Decrypt(_ciParams["almPassword"], secretkey),
                                           _ciParams["almDomain"], _ciParams["almProject"],
                                           dblQcTimeout, enmQcRunMode, _ciParams["almRunHost"],
                                           sets, isFilterSelected, filterByName, filterByStatuses, initialTestRun);
            break;

        case TestStorageType.FileSystem:
            //get the displayController flag
            bool displayController = false;
            if (_ciParams.ContainsKey("displayController"))
            {
                if (_ciParams["displayController"] == "1") { displayController = true; }
            }
            string analysisTemplate = (_ciParams.ContainsKey("analysisTemplate") ? _ciParams["analysisTemplate"] : "");

            List<TestData> validBuildTests = getValidTests("Test", Resources.LauncherNoTestsFound, Resources.LauncherNoValidTests);

            //add build tests and cleanup tests in the correct order
            List<TestData> validTests = new List<TestData>();
            if (!rerunFailedTests)
            {
                ConsoleWriter.WriteLine("Run build tests");

                //run only the build tests
                foreach (var item in validBuildTests)
                {
                    validTests.Add(item);
                }
            }
            else
            {
                //also add the cleanup tests
                string fsTestType = (_ciParams.ContainsKey("testType") ? _ciParams["testType"] : "");

                List<TestData> validFailedTests = getValidTests("FailedTest", Resources.LauncherNoFailedTestsFound, Resources.LauncherNoValidFailedTests);
                List<TestData> validCleanupTests = new List<TestData>();
                if (getValidTests("CleanupTest", Resources.LauncherNoCleanupTestsFound, Resources.LauncherNoValidCleanupTests).Count > 0)
                {
                    validCleanupTests = getValidTests("CleanupTest", Resources.LauncherNoCleanupTestsFound, Resources.LauncherNoValidCleanupTests);
                }

                List<string> reruns = GetParamsWithPrefix("Reruns");
                List<int> numberOfReruns = new List<int>();
                foreach (var item in reruns)
                {
                    numberOfReruns.Add(int.Parse(item));
                }

                bool noRerunsSet = checkReruns(numberOfReruns);
                int currentRerun;
                if (noRerunsSet)
                {
                    ConsoleWriter.WriteLine("In order to rerun the tests, the number of reruns should be greater than zero.");
                }
                else
                {
                    for (int i = 0; i < numberOfReruns.Count; i++)
                    {
                        currentRerun = numberOfReruns.ElementAt(i);

                        if (fsTestType.Equals("Of any of the build's tests"))
                        {
                            ConsoleWriter.WriteLine("Rerun the entire test set");
                            while (currentRerun > 0)
                            {
                                if (validCleanupTests.Count > 0)
                                {
                                    validTests.Add(validCleanupTests.ElementAt(i));
                                }
                                foreach (var item in validFailedTests)
                                {
                                    validTests.Add(item);
                                }
                                currentRerun--;
                            }
                        }
                        else
                        {
                            while (currentRerun > 0)
                            {
                                if (validCleanupTests.Count > 0)
                                {
                                    validTests.Add(validCleanupTests.ElementAt(i));
                                }
                                validTests.Add(validFailedTests.ElementAt(i));
                                currentRerun--;
                            }
                        }
                    }
                }
            }

            //get the Jenkins environment variables (each entry is a "name,value" pair)
            IEnumerable<string> jenkinsEnvVariablesWithCommas = GetParamsWithPrefix("JenkinsEnv");
            Dictionary<string, string> jenkinsEnvVariables = new Dictionary<string, string>();
            foreach (string var in jenkinsEnvVariablesWithCommas)
            {
                string[] nameVal = var.Split(",;".ToCharArray());
                jenkinsEnvVariables.Add(nameVal[0], nameVal[1]);
            }

            //parse the timeout into a TimeSpan ("-1" means no timeout)
            TimeSpan timeout = TimeSpan.MaxValue;
            if (_ciParams.ContainsKey("fsTimeout"))
            {
                string strTimeoutInSeconds = _ciParams["fsTimeout"];
                if (strTimeoutInSeconds.Trim() != "-1")
                {
                    int intTimeoutInSeconds = 0;
                    int.TryParse(strTimeoutInSeconds, out intTimeoutInSeconds);
                    timeout = TimeSpan.FromSeconds(intTimeoutInSeconds);
                }
            }
            ConsoleWriter.WriteLine("Launcher timeout is " + timeout.ToString(@"dd\:\:hh\:mm\:ss"));

            //LR-specific values:
            //default values are set by the Java code, in com.hpe.application.automation.tools.model.RunFromFileSystemModel.java
            int pollingInterval = 30;
            if (_ciParams.ContainsKey("controllerPollingInterval"))
            {
                pollingInterval = int.Parse(_ciParams["controllerPollingInterval"]);
            }
            ConsoleWriter.WriteLine("Controller Polling Interval: " + pollingInterval + " seconds");

            TimeSpan perScenarioTimeOutMinutes = TimeSpan.MaxValue;
            if (_ciParams.ContainsKey("PerScenarioTimeOut"))
            {
                string strTimeoutInMinutes = _ciParams["PerScenarioTimeOut"];
                if (strTimeoutInMinutes.Trim() != "-1")
                {
                    int intTimeoutInMinutes = 0;
                    if (int.TryParse(strTimeoutInMinutes, out intTimeoutInMinutes))
                    {
                        perScenarioTimeOutMinutes = TimeSpan.FromMinutes(intTimeoutInMinutes);
                    }
                }
            }
            ConsoleWriter.WriteLine("PerScenarioTimeout: " + perScenarioTimeOutMinutes.ToString(@"dd\:\:hh\:mm\:ss") + " minutes");

            char[] delim = { '\n' };
            List<string> ignoreErrorStrings = new List<string>();
            if (_ciParams.ContainsKey("ignoreErrorStrings"))
            {
                ignoreErrorStrings.AddRange(Array.ConvertAll(_ciParams["ignoreErrorStrings"].Split(delim, StringSplitOptions.RemoveEmptyEntries), ignoreError => ignoreError.Trim()));
            }

            //if an analysis template path was provided and it doesn't exist, stop the launcher
            if (!analysisTemplate.Equals("") && !Helper.FileExists(analysisTemplate))
            {
                return null;
            }

            //--MC connection info
            McConnectionInfo mcConnectionInfo = new McConnectionInfo();
            if (_ciParams.ContainsKey("MobileHostAddress"))
            {
                string mcServerUrl = _ciParams["MobileHostAddress"];
                if (!string.IsNullOrEmpty(mcServerUrl))
                {
                    //url is something like http://xxx.xxx.xxx.xxx:8080
                    string[] strArray = mcServerUrl.Split(new Char[] { ':' });
                    if (strArray.Length == 3)
                    {
                        mcConnectionInfo.MobileHostAddress = strArray[1].Replace("/", "");
                        mcConnectionInfo.MobileHostPort = strArray[2];
                    }

                    //mc username
                    if (_ciParams.ContainsKey("MobileUserName"))
                    {
                        string mcUsername = _ciParams["MobileUserName"];
                        if (!string.IsNullOrEmpty(mcUsername)) { mcConnectionInfo.MobileUserName = mcUsername; }
                    }

                    //mc password
                    if (_ciParams.ContainsKey("MobilePassword"))
                    {
                        string mcPassword = _ciParams["MobilePassword"];
                        if (!string.IsNullOrEmpty(mcPassword)) { mcConnectionInfo.MobilePassword = Decrypt(mcPassword, secretkey); }
                    }

                    //mc tenant id
                    if (_ciParams.ContainsKey("MobileTenantId"))
                    {
                        string mcTenantId = _ciParams["MobileTenantId"];
                        if (!string.IsNullOrEmpty(mcTenantId)) { mcConnectionInfo.MobileTenantId = mcTenantId; }
                    }

                    //ssl
                    if (_ciParams.ContainsKey("MobileUseSSL"))
                    {
                        string mcUseSSL = _ciParams["MobileUseSSL"];
                        if (!string.IsNullOrEmpty(mcUseSSL)) { mcConnectionInfo.MobileUseSSL = int.Parse(mcUseSSL); }
                    }

                    //proxy enabled flag
                    if (_ciParams.ContainsKey("MobileUseProxy"))
                    {
                        string useProxy = _ciParams["MobileUseProxy"];
                        if (!string.IsNullOrEmpty(useProxy)) { mcConnectionInfo.MobileUseProxy = int.Parse(useProxy); }
                    }

                    //proxy type
                    if (_ciParams.ContainsKey("MobileProxyType"))
                    {
                        string proxyType = _ciParams["MobileProxyType"];
                        if (!string.IsNullOrEmpty(proxyType)) { mcConnectionInfo.MobileProxyType = int.Parse(proxyType); }
                    }

                    //proxy address, e.g. "16.105.9.23:8080"
                    if (_ciParams.ContainsKey("MobileProxySetting_Address"))
                    {
                        string proxyAddress = _ciParams["MobileProxySetting_Address"];
                        if (!string.IsNullOrEmpty(proxyAddress))
                        {
                            string[] strArray4ProxyAddr = proxyAddress.Split(new Char[] { ':' });
                            if (strArray4ProxyAddr.Length == 2)
                            {
                                mcConnectionInfo.MobileProxySetting_Address = strArray4ProxyAddr[0];
                                mcConnectionInfo.MobileProxySetting_Port = int.Parse(strArray4ProxyAddr[1]);
                            }
                        }
                    }

                    //proxy authentication
                    if (_ciParams.ContainsKey("MobileProxySetting_Authentication"))
                    {
                        string proxyAuthentication = _ciParams["MobileProxySetting_Authentication"];
                        if (!string.IsNullOrEmpty(proxyAuthentication)) { mcConnectionInfo.MobileProxySetting_Authentication = int.Parse(proxyAuthentication); }
                    }

                    //proxy username
                    if (_ciParams.ContainsKey("MobileProxySetting_UserName"))
                    {
                        string proxyUsername = _ciParams["MobileProxySetting_UserName"];
                        if (!string.IsNullOrEmpty(proxyUsername)) { mcConnectionInfo.MobileProxySetting_UserName = proxyUsername; }
                    }

                    //proxy password
                    if (_ciParams.ContainsKey("MobileProxySetting_Password"))
                    {
                        string proxyPassword = _ciParams["MobileProxySetting_Password"];
                        if (!string.IsNullOrEmpty(proxyPassword)) { mcConnectionInfo.MobileProxySetting_Password = Decrypt(proxyPassword, secretkey); }
                    }
                }
            }

            //other mobile info
            string mobileinfo = "";
            if (_ciParams.ContainsKey("mobileinfo"))
            {
                mobileinfo = _ciParams["mobileinfo"];
            }

            Dictionary<string, List<string>> parallelRunnerEnvironments = new Dictionary<string, List<string>>();

            //retrieve the parallel runner environments for each test
            if (_ciParams.ContainsKey("parallelRunnerMode"))
            {
                foreach (var test in validTests)
                {
                    string envKey = "Parallel" + test.Id + "Env";
                    List<string> testEnvironments = GetParamsWithPrefix(envKey);

                    //add the environments for all the valid tests
                    parallelRunnerEnvironments.Add(test.Id, testEnvironments);
                }
            }

            //users can provide a custom report path
            string reportPath = null;
            if (_ciParams.ContainsKey("fsReportPath"))
            {
                reportPath = _ciParams["fsReportPath"];
            }

            SummaryDataLogger summaryDataLogger = GetSummaryDataLogger();
            List<ScriptRTSModel> scriptRTSSet = GetScriptRTSSet();

            if (_ciParams.ContainsKey("fsUftRunMode"))
            {
                string uftRunMode = _ciParams["fsUftRunMode"];
                runner = new FileSystemTestsRunner(validTests, timeout, uftRunMode, pollingInterval, perScenarioTimeOutMinutes, ignoreErrorStrings, jenkinsEnvVariables, mcConnectionInfo, mobileinfo, parallelRunnerEnvironments, displayController, analysisTemplate, summaryDataLogger, scriptRTSSet, reportPath);
            }
            else
            {
                runner = new FileSystemTestsRunner(validTests, timeout, pollingInterval, perScenarioTimeOutMinutes, ignoreErrorStrings, jenkinsEnvVariables, mcConnectionInfo, mobileinfo, parallelRunnerEnvironments, displayController, analysisTemplate, summaryDataLogger, scriptRTSSet, reportPath);
            }
            break;

        default:
            runner = null;
            break;
    }
    return runner;
}
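// Illustrative sketch only (not part of the launcher): demonstrates the CI-parameter
// conventions CreateRunner relies on above - "fsTimeout" of -1 means "no timeout",
// "JenkinsEnv*" values carry "name,value" pairs, and "MobileHostAddress" is split on ':'
// so that "http://host:port" yields exactly three parts. All sample values are hypothetical.
private static void CiParamConventionsSketch()
{
    //"-1" disables the launcher timeout; any other number is taken as seconds
    string fsTimeout = "3600"; // hypothetical value
    TimeSpan timeout = TimeSpan.MaxValue;
    if (fsTimeout.Trim() != "-1")
    {
        int seconds;
        int.TryParse(fsTimeout, out seconds);
        timeout = TimeSpan.FromSeconds(seconds);
    }
    Console.WriteLine("Timeout: " + timeout);

    //a JenkinsEnv entry carries "name,value"
    string jenkinsEnvEntry = "BUILD_NUMBER,42"; // hypothetical value
    string[] nameVal = jenkinsEnvEntry.Split(",;".ToCharArray());
    Console.WriteLine(nameVal[0] + " = " + nameVal[1]);

    //"http://host:port" splits on ':' into the scheme, "//host" and the port
    string mcServerUrl = "http://16.105.9.23:8080"; // hypothetical value
    string[] parts = mcServerUrl.Split(':');
    if (parts.Length == 3)
    {
        Console.WriteLine("Host: " + parts[1].Replace("/", "") + ", Port: " + parts[2]);
    }
}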
/// <summary>
/// Opens the report viewer for the given report directory.
/// </summary>
/// <param name="reportDirectory"></param>
public static void OpenReport(string reportDirectory)
{
    Helper.OpenReport(reportDirectory, ref _uftViewerPath);
}
/// <summary>
/// Runs all tests given to this runner and returns a suite of run results.
/// </summary>
/// <returns>The test run results for each test</returns>
public override TestSuiteRunResults Run()
{
    //create a new Run Results object
    TestSuiteRunResults activeRunDesc = new TestSuiteRunResults();

    double totalTime = 0;
    try
    {
        var start = DateTime.Now;
        foreach (var test in _tests)
        {
            if (RunCancelled()) { break; }

            var testStart = DateTime.Now;

            string errorReason = string.Empty;
            TestRunResults runResult = null;
            try
            {
                runResult = RunHpToolsTest(test, ref errorReason);
            }
            catch (Exception ex)
            {
                runResult = new TestRunResults();
                runResult.TestState = TestState.Error;
                runResult.ErrorDesc = ex.Message;
                runResult.TestName = test.TestName;
            }

            //get the original source for this test, for grouping tests under test classes
            runResult.TestGroup = test.TestGroup;

            activeRunDesc.TestRuns.Add(runResult);

            //if the test wasn't already marked as failed, refine its state from the report or the error info
            if (runResult.TestState != TestState.Failed)
            {
                if (runResult.TestState != TestState.Error)
                {
                    Helper.GetTestStateFromReport(runResult);
                }
                else
                {
                    if (string.IsNullOrEmpty(runResult.ErrorDesc))
                    {
                        if (RunCancelled())
                        {
                            runResult.ErrorDesc = HpToolsLauncher.Properties.Resources.ExceptionUserCancelled;
                        }
                        else
                        {
                            runResult.ErrorDesc = HpToolsLauncher.Properties.Resources.ExceptionExternalProcess;
                        }
                    }
                    runResult.ReportLocation = null;
                    runResult.TestState = TestState.Error;
                }
            }

            if (runResult.TestState == TestState.Passed && runResult.HasWarnings)
            {
                runResult.TestState = TestState.Warning;
                ConsoleWriter.WriteLine(Resources.FsRunnerTestDoneWarnings);
            }
            else
            {
                ConsoleWriter.WriteLine(string.Format(Resources.FsRunnerTestDone, runResult.TestState));
            }

            ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Test complete: " + runResult.TestPath + "\n-------------------------------------------------------------------------------------------------------");

            UpdateCounters(runResult.TestState);

            var testTotalTime = (DateTime.Now - testStart).TotalSeconds;
        }
        totalTime = (DateTime.Now - start).TotalSeconds;
    }
    finally
    {
        activeRunDesc.NumTests = _tests.Count;
        activeRunDesc.NumErrors = _errors;
        activeRunDesc.TotalRunTime = TimeSpan.FromSeconds(totalTime);
        activeRunDesc.NumFailures = _fail;

        foreach (IFileSysTestRunner cleanupRunner in _colRunnersForCleanup.Values)
        {
            cleanupRunner.CleanUp();
        }
    }

    return activeRunDesc;
}
/// <summary>
/// Creates the correct type of runner and runs a single test.
/// </summary>
/// <param name="testInfo"></param>
/// <param name="errorReason"></param>
/// <returns></returns>
private TestRunResults RunHpToolsTest(TestInfo testInfo, ref string errorReason)
{
    var testPath = testInfo.TestPath;
    var type = Helper.GetTestType(testPath);

    //if we have at least one environment for the parallel runner, it must be enabled
    var isParallelRunnerEnabled = _parallelRunnerEnvironments.Count > 0;

    if (isParallelRunnerEnabled && type == TestType.QTP)
    {
        type = TestType.ParallelRunner;
    }
    //if the current test is an API test, ignore the parallel runner flag and continue as usual
    else if (isParallelRunnerEnabled && type == TestType.ST)
    {
        ConsoleWriter.WriteLine("ParallelRunner does not support API tests, treating as normal test.");
    }

    IFileSysTestRunner runner = null;
    switch (type)
    {
        case TestType.ST:
            runner = new ApiTestRunner(this, _timeout - _stopwatch.Elapsed);
            break;
        case TestType.QTP:
            runner = new GuiTestRunner(this, _useUFTLicense, _timeout - _stopwatch.Elapsed, _uftRunMode, _mcConnection, _mobileInfoForAllGuiTests);
            break;
        case TestType.LoadRunner:
            AppDomain.CurrentDomain.AssemblyResolve += Helper.HPToolsAssemblyResolver;
            runner = new PerformanceTestRunner(this, _timeout, _pollingInterval, _perScenarioTimeOutMinutes, _ignoreErrorStrings, _displayController, _analysisTemplate, _summaryDataLogger, _scriptRTSSet);
            break;
        case TestType.ParallelRunner:
            runner = new ParallelTestRunner(this, _timeout - _stopwatch.Elapsed, _mcConnection, _mobileInfoForAllGuiTests, _parallelRunnerEnvironments);
            break;
    }

    if (runner != null)
    {
        if (!_colRunnersForCleanup.ContainsKey(type))
        {
            _colRunnersForCleanup.Add(type, runner);
        }

        Stopwatch s = Stopwatch.StartNew();

        var results = runner.RunTest(testInfo, ref errorReason, RunCancelled);

        results.Runtime = s.Elapsed;
        if (type == TestType.LoadRunner)
        {
            AppDomain.CurrentDomain.AssemblyResolve -= Helper.HPToolsAssemblyResolver;
        }

        return results;
    }

    //check for abortion
    if (System.IO.File.Exists(_abortFilename))
    {
        ConsoleWriter.WriteLine(Resources.GeneralStopAborted);

        //stop working
        Environment.Exit((int)Launcher.ExitCodeEnum.Aborted);
    }

    return new TestRunResults { ErrorDesc = "Unknown TestType", TestState = TestState.Error };
}
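// Illustrative sketch only: each per-test runner above receives "_timeout - _stopwatch.Elapsed",
// i.e. whatever is left of the overall launcher timeout, so long-running early tests shrink
// the budget of the tests that follow. A minimal standalone illustration of that pattern
// (the clamp to zero is an addition for clarity, not the launcher's own behavior):
private static TimeSpan RemainingBudgetSketch(TimeSpan overallTimeout, Stopwatch elapsedSoFar)
{
    TimeSpan remaining = overallTimeout - elapsedSoFar.Elapsed;
    return remaining > TimeSpan.Zero ? remaining : TimeSpan.Zero; // clamp once the budget is spent
}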
/// <summary>
/// Runs all tests given to this runner and returns a suite of run results.
/// </summary>
/// <returns>The test run results for each test</returns>
public override TestSuiteRunResults Run()
{
    //create a new Run Results object
    TestSuiteRunResults activeRunDesc = new TestSuiteRunResults();
    double totalTime = 0;
    try
    {
        var start = DateTime.Now;

        Dictionary<string, int> indexList = new Dictionary<string, int>();
        foreach (var test in _tests)
        {
            indexList[test.TestPath] = 0;
        }

        Dictionary<string, int> rerunList = createDictionary(_tests);

        foreach (var test in _tests)
        {
            if (indexList[test.TestPath] == 0)
            {
                indexList[test.TestPath] = 1;
            }

            if (RunCancelled()) { break; }

            var testStart = DateTime.Now;

            string errorReason = string.Empty;
            TestRunResults runResult = null;
            try
            {
                runResult = RunHpToolsTest(test, ref errorReason);
            }
            catch (Exception ex)
            {
                runResult = new TestRunResults
                {
                    TestState = TestState.Error,
                    ErrorDesc = ex.Message,
                    TestName = test.TestName,
                    TestPath = test.TestPath
                };
            }

            //get the original source for this test, for grouping tests under test classes
            runResult.TestGroup = test.TestGroup;

            activeRunDesc.TestRuns.Add(runResult);

            //if the test wasn't already marked as failed, refine its state from the report or the error info
            if (runResult.TestState != TestState.Failed)
            {
                if (runResult.TestState != TestState.Error)
                {
                    Helper.GetTestStateFromReport(runResult);
                }
                else
                {
                    if (string.IsNullOrEmpty(runResult.ErrorDesc))
                    {
                        runResult.ErrorDesc = RunCancelled()
                            ? HpToolsLauncher.Properties.Resources.ExceptionUserCancelled
                            : HpToolsLauncher.Properties.Resources.ExceptionExternalProcess;
                    }
                    runResult.ReportLocation = null;
                    runResult.TestState = TestState.Error;
                }
            }

            if (runResult.TestState == TestState.Passed && runResult.HasWarnings)
            {
                runResult.TestState = TestState.Warning;
                ConsoleWriter.WriteLine(Resources.FsRunnerTestDoneWarnings);
            }
            else
            {
                ConsoleWriter.WriteLine(string.Format(Resources.FsRunnerTestDone, runResult.TestState));
                if (runResult.TestState == TestState.Error)
                {
                    ConsoleWriter.WriteErrLine(runResult.ErrorDesc);
                }
            }

            ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Test complete: " + runResult.TestPath + "\n-------------------------------------------------------------------------------------------------------");

            UpdateCounters(runResult.TestState);
            var testTotalTime = (DateTime.Now - testStart).TotalSeconds;

            //track how many reruns are left for this test and which report index to use next
            if (rerunList[test.TestPath] > 0)
            {
                if (!Directory.Exists(Path.Combine(test.TestPath, "Report1")))
                {
                    rerunList[test.TestPath]--;
                }
                else
                {
                    indexList[test.TestPath]++;
                    rerunList[test.TestPath]--;
                }
            }

            //update the report folder: rename "Report" to Report1, 2, ..., N
            String uftReportDir = Path.Combine(test.TestPath, "Report");
            String uftReportDirNew = Path.Combine(test.TestPath, "Report" + indexList[test.TestPath]);
            if (Directory.Exists(uftReportDir))
            {
                if (Directory.Exists(uftReportDirNew))
                {
                    DelecteDirectory(uftReportDirNew);
                }
                Directory.Move(uftReportDir, uftReportDirNew);
            }
        }
        totalTime = (DateTime.Now - start).TotalSeconds;
    }
    finally
    {
        activeRunDesc.NumTests = _tests.Count;
        activeRunDesc.NumErrors = _errors;
        activeRunDesc.TotalRunTime = TimeSpan.FromSeconds(totalTime);
        activeRunDesc.NumFailures = _fail;

        foreach (IFileSysTestRunner cleanupRunner in _colRunnersForCleanup.Values)
        {
            cleanupRunner.CleanUp();
        }
    }
    return activeRunDesc;
}
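// Illustrative sketch only (not part of the launcher): the rerun loop above rotates UFT report
// folders by renaming "Report" to "Report1", "Report2", ... so earlier runs are not overwritten.
// A minimal standalone version of that rotation; "testPath" and "runIndex" are hypothetical inputs.
private static void RotateReportFolderSketch(string testPath, int runIndex)
{
    string reportDir = Path.Combine(testPath, "Report");
    string rotatedDir = Path.Combine(testPath, "Report" + runIndex);

    if (Directory.Exists(reportDir))
    {
        if (Directory.Exists(rotatedDir))
        {
            Directory.Delete(rotatedDir, true); // the launcher uses its own delete helper here
        }
        Directory.Move(reportDir, rotatedDir);
    }
}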
/// <summary>
/// Creates an instance of the runner for the given test sources.
/// </summary>
/// <param name="sources"></param>
/// <param name="timeout"></param>
/// <param name="controllerPollingInterval"></param>
/// <param name="perScenarioTimeOutMinutes"></param>
/// <param name="ignoreErrorStrings"></param>
/// <param name="jenkinsEnvVariables"></param>
/// <param name="mcConnection"></param>
/// <param name="mobileInfo"></param>
/// <param name="parallelRunnerEnvironments"></param>
/// <param name="displayController"></param>
/// <param name="analysisTemplate"></param>
/// <param name="summaryDataLogger"></param>
/// <param name="scriptRtsSet"></param>
/// <param name="reportPath"></param>
/// <param name="useUftLicense"></param>
public FileSystemTestsRunner(List<TestData> sources,
                             TimeSpan timeout,
                             int controllerPollingInterval,
                             TimeSpan perScenarioTimeOutMinutes,
                             List<string> ignoreErrorStrings,
                             Dictionary<string, string> jenkinsEnvVariables,
                             McConnectionInfo mcConnection,
                             string mobileInfo,
                             Dictionary<string, List<string>> parallelRunnerEnvironments,
                             bool displayController,
                             string analysisTemplate,
                             SummaryDataLogger summaryDataLogger,
                             List<ScriptRTSModel> scriptRtsSet,
                             string reportPath,
                             bool useUftLicense = false)
{
    _jenkinsEnvVariables = jenkinsEnvVariables;

    //search if we have any testing tools installed
    if (!Helper.IsTestingToolsInstalled(TestStorageType.FileSystem))
    {
        ConsoleWriter.WriteErrLine(string.Format(Resources.FileSystemTestsRunner_No_HP_testing_tool_is_installed_on, System.Environment.MachineName));
        Environment.Exit((int)Launcher.ExitCodeEnum.Failed);
    }

    _timeout = timeout;
    ConsoleWriter.WriteLine("FileSystemTestRunner timeout is " + _timeout);
    _stopwatch = Stopwatch.StartNew();

    _pollingInterval = controllerPollingInterval;
    _perScenarioTimeOutMinutes = perScenarioTimeOutMinutes;
    _ignoreErrorStrings = ignoreErrorStrings;

    _useUFTLicense = useUftLicense;
    _displayController = displayController;
    _analysisTemplate = analysisTemplate;
    _summaryDataLogger = summaryDataLogger;
    _scriptRTSSet = scriptRtsSet;
    _tests = new List<TestInfo>();

    _mcConnection = mcConnection;
    _mobileInfoForAllGuiTests = mobileInfo;
    _parallelRunnerEnvironments = parallelRunnerEnvironments;

    ConsoleWriter.WriteLine("UFT Mobile connection info is - " + _mcConnection.ToString());

    if (reportPath != null)
    {
        ConsoleWriter.WriteLine("Results directory is: " + reportPath);
    }

    //go over all sources, and create a list of all tests
    foreach (TestData source in sources)
    {
        List<TestInfo> testGroup = new List<TestInfo>();
        try
        {
            //--handle directories which contain test subdirectories (recursively)
            if (Helper.IsDirectory(source.Tests))
            {
                var testsLocations = Helper.GetTestsLocations(source.Tests);
                foreach (var loc in testsLocations)
                {
                    var test = new TestInfo(loc, loc, source.Tests, source.Id);
                    testGroup.Add(test);
                }
            }
            //--handle files: a LoadRunner scenario, an .mtb file (which contains links to tests)
            //  or an .mtbx file; other files are dropped
            else
            {
                testGroup = new List<TestInfo>();
                FileInfo fi = new FileInfo(source.Tests);
                if (fi.Extension == Helper.LoadRunnerFileExtention)
                {
                    testGroup.Add(new TestInfo(source.Tests, source.Tests, source.Tests, source.Id));
                }
                else if (fi.Extension == ".mtb")
                {
                    MtbManager manager = new MtbManager();
                    var paths = manager.Parse(source.Tests);
                    foreach (var p in paths)
                    {
                        testGroup.Add(new TestInfo(p, p, source.Tests, source.Id));
                    }
                }
                else if (fi.Extension == ".mtbx")
                {
                    testGroup = MtbxManager.Parse(source.Tests, _jenkinsEnvVariables, source.Tests);

                    //set the test Id for each test in the group; this is important for the parallel runner
                    foreach (var testInfo in testGroup)
                    {
                        testInfo.TestId = source.Id;
                    }
                }
            }
        }
        catch (Exception)
        {
            testGroup = new List<TestInfo>();
        }

        //--a single test directory gets the default group name
        if (testGroup.Count == 1)
        {
            testGroup[0].TestGroup = "Test group";
        }

        _tests.AddRange(testGroup);
    }

    if (_tests == null || _tests.Count == 0)
    {
        ConsoleWriter.WriteLine(Resources.FsRunnerNoValidTests);
        Environment.Exit((int)Launcher.ExitCodeEnum.Failed);
    }

    //if a custom path was provided, set the custom report path for all the valid tests (this overwrites the default location)
    if (reportPath != null)
    {
        _tests.ForEach(test => test.ReportPath = reportPath);
    }

    ConsoleWriter.WriteLine(string.Format(Resources.FsRunnerTestsFound, _tests.Count));

    foreach (var test in _tests)
    {
        ConsoleWriter.WriteLine("" + test.TestName);
        if (parallelRunnerEnvironments.ContainsKey(test.TestId))
        {
            parallelRunnerEnvironments[test.TestId].ForEach(env => ConsoleWriter.WriteLine(" " + env));
        }
    }

    ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator);
}
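// Illustrative sketch only (not part of the launcher): "parallelRunnerEnvironments" maps a test Id
// to the environment descriptors that ParallelRunner should launch it on; the constructor above
// simply echoes them under the test name. The ids and environment strings below are hypothetical.
private static Dictionary<string, List<string>> ParallelEnvironmentsSketch()
{
    var environments = new Dictionary<string, List<string>>
    {
        { "1", new List<string> { "browser: Chrome", "browser: IE" } }, // hypothetical GUI test ids
        { "2", new List<string> { "deviceId: HT4CTJT00543" } }          // and environment strings
    };

    foreach (var entry in environments)
    {
        Console.WriteLine("Test " + entry.Key + ":");
        entry.Value.ForEach(env => Console.WriteLine(" " + env));
    }
    return environments;
}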