/// <summary>
/// Pushes an execution result for a single test instance back into ALM.
/// Locates the test set currently selected in the ALM tree, finds the test
/// instance whose name matches <paramref name="strSelectedScriptName"/>
/// (case-insensitive) and writes status, failure reason and Jenkins log to it.
/// </summary>
/// <param name="strSelectedScriptName">Name of the test instance to update.</param>
/// <param name="strStatus">Execution status to store (e.g. "Passed"/"Failed").</param>
/// <param name="strFailureReason">Written to user field TC_USER_01.</param>
/// <param name="strJenkinsLog">Written to user field TC_USER_09.</param>
private void UploadALM(string strSelectedScriptName, string strStatus, string strFailureReason, string strJenkinsLog)
{
    string strSelectedTestSetName = treeViewALM.SelectedNode.Name;
    string strSelectedTestSetFullPath = BuildPath(treeViewALM.SelectedNode);
    TestSetTreeManager testSetTreeMgr = (TestSetTreeManager)conn.TestSetTreeManager;
    TestSetFolder rootTestSetFolder = (TestSetFolder)testSetTreeMgr.get_NodeByPath(strSelectedTestSetFullPath);
    List testSetList = rootTestSetFolder.FindTestSets(strSelectedTestSetName, false, string.Empty);
    IEnumerator enumerator = testSetList.GetEnumerator();
    // Only the first matching test set is considered (mirrors the tree selection).
    if (enumerator.MoveNext())
    {
        TestSet testSet = (TestSet)enumerator.Current;
        TSTestFactory tsTestFactory = (TSTestFactory)testSet.TSTestFactory;
        ListClass testList = (ListClass)tsTestFactory.NewList(string.Empty);
        IEnumerator testListEnum = testList.GetEnumerator();
        while (testListEnum.MoveNext())
        {
            TSTest atest = (TSTest)testListEnum.Current;
            // Ordinal, case-insensitive match avoids culture-specific ToLower() surprises.
            if (string.Equals(atest.TestName, strSelectedScriptName, StringComparison.OrdinalIgnoreCase))
            {
                atest.Status = strStatus;
                atest["TC_USER_01"] = strFailureReason;
                atest["TC_USER_09"] = strJenkinsLog;
                atest.Post();
            }
        }
    }
}
/// <summary>
/// Tree selection handler for the ALM test-set tree. When the selected node is
/// tagged "testset", fills the grid with that set's test instances (name,
/// status, failure reason, Jenkins log); any other selection clears the grid.
/// </summary>
private void treeViewALM_AfterSelect(object sender, TreeViewEventArgs e)
{
    Cursor.Current = Cursors.WaitCursor;
    if (e.Node.Tag != null && e.Node.Tag.ToString().ToLower() == "testset")
    {
        TestSetTreeManager testSetTreeMgr = (TestSetTreeManager)conn.TestSetTreeManager;
        var path = BuildPath(e.Node);
        TestSetFolder rootTestSetFolder = (TestSetFolder)testSetTreeMgr.get_NodeByPath(path);
        List testSetList = rootTestSetFolder.FindTestSets(e.Node.Text, false, string.Empty);
        IEnumerator enumerator = testSetList.GetEnumerator();
        if (enumerator.MoveNext())
        {
            TestSet testSet = (TestSet)enumerator.Current;
            TSTestFactory tsTestFactory = (TSTestFactory)testSet.TSTestFactory;
            ListClass testList = (ListClass)tsTestFactory.NewList(string.Empty);
            IEnumerator testListEnum = testList.GetEnumerator();
            // Lazily create the backing table, or clear the previous selection's rows.
            if (dtALMTestDetails == null)
            {
                dtALMTestDetails = BuildDataTable();
            }
            else
            {
                dtALMTestDetails.Clear();
            }
            while (testListEnum.MoveNext())
            {
                DataRow drTestDetails = dtALMTestDetails.NewRow();
                TSTest atest = (TSTest)testListEnum.Current;
                drTestDetails["TestName"] = atest.TestName;
                drTestDetails["Status"] = atest.Status;
                drTestDetails["FailureReason"] = atest["TC_USER_01"];
                drTestDetails["JenkinsLog"] = atest["TC_USER_09"];
                dtALMTestDetails.Rows.Add(drTestDetails);
            }
        }
        dgvALM.DataSource = dtALMTestDetails;
    }
    else
    {
        // Non test-set node or untagged node: show an empty grid.
        // (The two original, byte-identical else branches were merged.)
        dtALMTestDetails.Clear();
        dgvALM.DataSource = dtALMTestDetails;
    }
    Cursor.Current = Cursors.Default;
}
/// <summary>
/// Adds the given design-test IDs to an existing test set as test instances,
/// posting each new instance to the server.
/// </summary>
/// <param name="ts">Target ALM test set.</param>
/// <param name="TestID">Design-test IDs to instantiate inside the set.</param>
private void CreateTestInstance(TestSet ts, List<string> TestID)
{
    // Direct casts instead of 'as' so a COM interface mismatch fails loudly here
    // (InvalidCastException) rather than as a NullReferenceException further down.
    TSTestFactory tsF = (TSTestFactory)ts.TSTestFactory;
    foreach (string id in TestID)
    {
        TSTest TestInstance = (TSTest)tsF.AddItem(id);
        TestInstance.Post();
    }
}
/// <summary>
/// Creates (or resolves) a test set at the given folder path / name and adds the
/// given design-test IDs to it as test instances.
/// </summary>
/// <param name="TestSetFolderPath">ALM path of the folder containing the test set.</param>
/// <param name="testsetName">Name of the test set to create or reuse.</param>
/// <param name="TestID">Design-test IDs to instantiate inside the set.</param>
public void CreateTestInstance(string TestSetFolderPath, string testsetName, List<string> TestID)
{
    TestSet ts = CreateTestSet_Internal(TestSetFolderPath, testsetName);
    // Delegate to the TestSet-based overload instead of duplicating the add loop.
    CreateTestInstance(ts, TestID);
}
/// <summary>
/// Initializes execution state from the ALM test set: captures report and
/// failure e-mail recipients, descriptive fields, the full test-case list, the
/// dependency graph derived from run conditions, and the linear execution order.
/// (Method name kept as-is — "IniatilizeTestSet" — to avoid breaking callers.)
/// </summary>
private void IniatilizeTestSet()
{
    TSTestFactory tSTestFactory = this.TestSet.TSTestFactory as TSTestFactory;
    // Recipients of the execution report e-mail.
    ITestSetExecutionReportSettings reportSettings = this.TestSet.ExecutionReportSettings as ITestSetExecutionReportSettings;
    this.EmailList = reportSettings.EMailTo;
    // Separate recipient list that is only notified when a test fails.
    IExecEventNotifyByMailSettings2 execNotifyByMail = this.TestSet.ExecEventNotifyByMailSettings as IExecEventNotifyByMailSettings2;
    if (execNotifyByMail.Enabled[(int)TDAPI_EXECUTIONEVENT.EXECEVENT_TESTFAIL])
    {
        this.FailedEmailList = execNotifyByMail.EMailTo;
    }
    this.Description = this.GetField("Test Set Description");
    this.Baseline = this.GetField("Test Set Baseline");
    this.TargetCycle = this.GetField("Test Set Target Cycle");
    // All test instances in the set, listed through the factory's filter.
    ITDFilter tDFilter = tSTestFactory.Filter as ITDFilter;
    List testcase = tDFilter.NewList();
    this.TotalTestCases = testcase.Count;
    // Populate the testCaseDict lookup, keyed by test-case ID.
    foreach (ITSTest test in testcase)
    {
        int testCaseID = int.Parse($"{test.ID}");
        this.testCaseDict.Add(testCaseID, test);
    }
    // Build the run-condition lookup and dependency graph, pick the starting
    // test case, then flatten the graph into a linear execution flow.
    Dictionary <int, Dictionary <int, CondInfo> > conditionLookup = this.CreateConditionLookupAndPopulateDependencyGraph();
    this.currTestCaseID = this.FindStartingTestCaseID(conditionLookup);
    // create linear flow based on dependency graph
    this.CreateLinearFlow(conditionLookup, this.currTestCaseID);
    this.Started = DateTime.Now;
}
/// <summary>
/// Records results for the tests in a test set and posts them to QC.
/// For every test instance in <paramref name="testSet"/> whose name appears in
/// <paramref name="testResults"/>, the matching status is written via
/// recordTestResult; instances without a result entry are only logged.
/// </summary>
/// <param name="testSet">Test set whose instances are updated.</param>
/// <param name="testResults">Map of test-case name to result, e.g. "EHR_REF_PAT_0001" -> "Passed".</param>
public void recordTestSetResults(TestSet testSet, Dictionary<string, string> testResults)
{
    TestSetFolder tsFolder = (TestSetFolder)testSet.TestSetFolder;
    log.Debug("tsFolder.Path: " + tsFolder.Path);
    string testSetInfo = "testSet.ID: " + testSet.ID.ToString() + DELIM + "testSet.Name: " + testSet.Name + DELIM + "testSet.Status: " + testSet.Status + DELIM + "";
    log.Debug("testSetInfo: " + testSetInfo);
    TSTestFactory tsTestFactory = (TSTestFactory)testSet.TSTestFactory;
    List tsTestList = tsTestFactory.NewList("");
    foreach (TSTest tsTest in tsTestList)
    {
        testCount++;
        string testInfo = DELIM + DELIM + DELIM + "TestId: " + tsTest.TestId + DELIM + "TestName: " + tsTest.TestName + DELIM + "";
        Run lastRun = (Run)tsTest.LastRun;
        if (lastRun != null)
        {
            testInfo += lastRun.Name + DELIM + lastRun.Status;
        }
        log.Debug("TestInfo: " + testInfo);
        // Single dictionary lookup instead of ContainsKey + indexer.
        string status;
        if (testResults.TryGetValue(tsTest.TestName, out status))
        {
            recordTestResult(tsTest, status);
        }
    }
}
/// <summary>
/// Builds a comma-separated list of the test-instance IDs in a test set
/// (used e.g. by the scheduler Stop() call when a run is aborted).
/// </summary>
/// <param name="set">Test set whose instances are listed.</param>
/// <returns>"id1,id2,..." without a trailing separator; "" when the set has no
/// instances or on any failure (best-effort).</returns>
private string GetTestInstancesString(ITestSet set)
{
    string retVal = "";
    try
    {
        TSTestFactory factory = set.TSTestFactory;
        List list = factory.NewList("");
        if (list == null)
        {
            return "";
        }
        foreach (ITSTest testInstance in list)
        {
            retVal += testInstance.ID + ",";
        }
        // BUG FIX: the original called TrimEnd without using its return value
        // (strings are immutable), so the trailing separator was never removed.
        retVal = retVal.TrimEnd(", \n".ToCharArray());
    }
    catch (Exception)
    {
        // Best-effort: callers treat a partial/empty string as "no instances".
    }
    return retVal;
}
/// <summary>
/// Tallies the run statuses of all test instances in the test set described by
/// <paramref name="TSItem"/>, accumulating them into TSItem.TestSetStatuses as
/// [statusName, count] string pairs.
/// </summary>
/// <param name="TSItem">Dynamic holder exposing TestSetID, TestSetName and TestSetStatuses.</param>
/// <returns>The same TSItem, with updated status counters.</returns>
public static Object GetTSRunStatus(dynamic TSItem)
{
    TestSetFactory TSetFact = mTDConn.TestSetFactory;
    TDFilter tsFilter = TSetFact.Filter;
    tsFilter["CY_CYCLE_ID"] = "" + TSItem.TestSetID + "";
    List Testset = TSetFact.NewList(tsFilter.Text);
    foreach (TestSet testset in Testset)
    {
        if (testset.Name == TSItem.TestSetName)
        {
            TSTestFactory TSTestFact = testset.TSTestFactory;
            // BUG FIX: the test-instance filter must come from the TSTest factory;
            // the original reused TSetFact.Filter (the TestSetFactory filter), so
            // TC_CYCLE_ID was applied to the wrong entity's filter.
            TDFilter tsTestFilter = TSTestFact.Filter;
            tsTestFilter["TC_CYCLE_ID"] = "" + TSItem.TestSetID + "";
            List TSActivities = TSTestFact.NewList(tsTestFilter.Text);
            foreach (TSTest tst in TSActivities)
            {
                bool existing = false;
                // Bump the counter when this status has been seen before.
                foreach (string[] status in TSItem.TestSetStatuses)
                {
                    if (status[0] == tst.Status)
                    {
                        existing = true;
                        status[1] = (Int32.Parse(status[1]) + 1).ToString();
                    }
                }
                if (!existing)
                {
                    TSItem.TestSetStatuses.Add(new string[] { tst.Status, "1" });
                }
            }
        }
    }
    return TSItem;
}
/// <summary>
/// Runs a test set with the given parameters (requires a valid connection to the
/// QC/ALM server): locates the set, starts the OTA scheduler, assigns a host to
/// each test instance, polls execution status until completion / timeout / user
/// abort, and collects per-test results.
/// </summary>
/// <param name="tsFolderName">Test-set folder name (appended to "Root\").</param>
/// <param name="tsName">Test-set name.</param>
/// <param name="timeout">-1 for unlimited, otherwise a limit compared against elapsed seconds.
/// NOTE(review): the doc said "miliseconds" but the code compares sw.Elapsed.TotalSeconds — confirm units with callers.</param>
/// <param name="runMode">Run locally, on a named remote host, or on the hosts planned in the set.</param>
/// <param name="runHost">Remote machine name when runMode is RUN_REMOTE.</param>
/// <returns>The aggregated suite results, or null when the folder/set was not found.</returns>
public TestSuiteRunResults RunTestSet(string tsFolderName, string tsName, double timeout, QcRunMode runMode, string runHost)
{
    string currentTestSetInstances = "";
    TestSuiteRunResults runDesc = new TestSuiteRunResults();
    TestRunResults activeTestDesc = null;
    var tsFactory = tdConnection.TestSetFactory;
    var tsTreeManager = (ITestSetTreeManager)tdConnection.TestSetTreeManager;
    List tsList = null;
    string tsPath = "Root\\" + tsFolderName;
    ITestSetFolder tsFolder = null;
    try
    {
        tsFolder = (ITestSetFolder)tsTreeManager.get_NodeByPath(tsPath);
    }
    catch (COMException ex)
    {
        // not found
        tsFolder = null;
    }
    if (tsFolder == null)
    {
        // node wasn't found, folder = null
        ConsoleWriter.WriteErrLine(string.Format(Resources.AlmRunnerNoSuchFolder, tsFolder));
        // this will make sure the run will fail at the end (since there was an error)
        Launcher.ExitCode = Launcher.ExitCodeEnum.Failed;
        return(null);
    }
    else
    {
        tsList = tsFolder.FindTestSets(tsName);
    }
    if (tsList == null)
    {
        ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerCantFindTest, tsName));
        // this will make sure the run will fail at the end (since there was an error)
        Launcher.ExitCode = Launcher.ExitCodeEnum.Failed;
        return(null);
    }
    // FindTestSets matches loosely; pick the exact (case-insensitive) name.
    ITestSet targetTestSet = null;
    foreach (ITestSet ts in tsList)
    {
        if (ts.Name.Equals(tsName, StringComparison.InvariantCultureIgnoreCase))
        {
            targetTestSet = ts;
            break;
        }
    }
    if (targetTestSet == null)
    {
        ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerCantFindTest, tsName));
        // this will make sure the run will fail at the end (since there was an error)
        Launcher.ExitCode = Launcher.ExitCodeEnum.Failed;
        return(null);
    }
    ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator);
    ConsoleWriter.WriteLine(Resources.AlmRunnerStartingExecution);
    ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerDisplayTest, tsName, targetTestSet.ID));
    ITSScheduler Scheduler = null;
    try
    {
        // need to run this to install everything needed: http://AlmServer:8080/qcbin/start_a.jsp?common=true
        // start the scheduler
        Scheduler = targetTestSet.StartExecution("");
    }
    catch (Exception ex)
    {
        Scheduler = null;
    }
    try
    {
        // captured up-front so an abort can Stop() exactly these instances later
        currentTestSetInstances = GetTestInstancesString(targetTestSet);
    }
    catch (Exception ex)
    {
    }
    if (Scheduler == null)
    {
        Console.WriteLine(GetAlmNotInstalledError());
        // proceeding with program execution is pointless, since nothing will run without a properly installed QC.
        Environment.Exit((int)Launcher.ExitCodeEnum.Failed);
    }
    // List the instances belonging to this test set (cycle).
    TSTestFactory tsTestFactory = targetTestSet.TSTestFactory;
    ITDFilter2 tdFilter = tsTestFactory.Filter;
    tdFilter["TC_CYCLE_ID"] = targetTestSet.ID.ToString();
    IList tList = tsTestFactory.NewList(tdFilter.Text);
    try
    {
        // set up for the run depending on where the test instances are to execute
        switch (runMode)
        {
            case QcRunMode.RUN_LOCAL:
                // run all tests on the local machine
                Scheduler.RunAllLocally = true;
                break;
            case QcRunMode.RUN_REMOTE:
                // run tests on a specified remote machine
                Scheduler.TdHostName = runHost;
                break;
            // RunAllLocally must not be set for remote invocation of tests.
            // As such, do not do this: Scheduler.RunAllLocally = False
            case QcRunMode.RUN_PLANNED_HOST:
                // run on the hosts as planned in the test set
                Scheduler.RunAllLocally = false;
                break;
        }
    }
    catch (Exception ex)
    {
        ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerProblemWithHost, ex.Message));
    }
    ConsoleWriter.WriteLine(Resources.AlmRunnerNumTests + tList.Count);
    int i = 1;
    foreach (ITSTest3 test in tList)
    {
        string runOnHost = runHost;
        if (runMode == QcRunMode.RUN_PLANNED_HOST)
        {
            runOnHost = test.HostName;
        }
        // if host isn't taken from QC (PLANNED) and not from the test definition (REMOTE), take it from LOCAL (machineName)
        string hostName = runOnHost;
        if (runMode == QcRunMode.RUN_LOCAL)
        {
            hostName = Environment.MachineName;
        }
        ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerDisplayTestRunOnHost, i, test.Name, hostName));
        Scheduler.RunOnHost[test.ID] = runOnHost;
        var testResults = new TestRunResults();
        testResults.TestName = test.Name;
        runDesc.TestRuns.Add(testResults);
        i = i + 1;
    }
    Stopwatch sw = Stopwatch.StartNew();
    // NOTE(review): testSw is never used — candidate for removal.
    Stopwatch testSw = null;
    try
    {
        // tests are actually run
        Scheduler.Run();
    }
    catch (Exception ex)
    {
        ConsoleWriter.WriteLine(Resources.AlmRunnerRunError + ex.Message);
    }
    ConsoleWriter.WriteLine(Resources.AlmRunnerSchedStarted + DateTime.Now.ToString(Launcher.DateFormat));
    ConsoleWriter.WriteLine(Resources.SingleSeperator);
    IExecutionStatus executionStatus = Scheduler.ExecutionStatus;
    bool tsExecutionFinished = false;
    ITSTest prevTest = null;
    ITSTest currentTest = null;
    // Presence of this file signals a user-requested abort (written elsewhere).
    string abortFilename = System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + "\\stop" + Launcher.UniqueTimeStamp + ".txt";
    // wait for the tests to end ("normally" or because of the timeout)
    while ((tsExecutionFinished == false) && (timeout == -1 || sw.Elapsed.TotalSeconds < timeout))
    {
        executionStatus.RefreshExecStatusInfo("all", true);
        tsExecutionFinished = executionStatus.Finished;
        if (System.IO.File.Exists(abortFilename))
        {
            break;
        }
        // OTA collections are 1-based.
        for (int j = 1; j <= executionStatus.Count; ++j)
        {
            TestExecStatus testExecStatusObj = executionStatus[j];
            activeTestDesc = UpdateTestStatus(runDesc, targetTestSet, testExecStatusObj, true);
            // only react to state transitions, not to repeated identical polls
            if (activeTestDesc.PrevTestState != activeTestDesc.TestState)
            {
                TestState tstate = activeTestDesc.TestState;
                if (tstate == TestState.Running)
                {
                    currentTest = targetTestSet.TSTestFactory[testExecStatusObj.TSTestId];
                    int testIndex = GetIdxByTestName(currentTest.Name, runDesc);
                    int prevRunId = GetTestRunId(currentTest);
                    runDesc.TestRuns[testIndex].PrevRunId = prevRunId;
                    // closing previous test
                    if (prevTest != null)
                    {
                        WriteTestRunSummary(prevTest);
                    }
                    // starting new test
                    prevTest = currentTest;
                    // assign the new test the console writer so it will gather the output
                    ConsoleWriter.ActiveTestRun = runDesc.TestRuns[testIndex];
                    ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running: " + currentTest.Name);
                    // tell user that the test is running
                    ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running test: " + activeTestDesc.TestName + ", Test id: " + testExecStatusObj.TestId + ", Test instance id: " + testExecStatusObj.TSTestId);
                    // start timing the new test run
                    string foldername = "";
                    ITestSetFolder folder = targetTestSet.TestSetFolder as ITestSetFolder;
                    if (folder != null)
                    {
                        foldername = folder.Name.Replace(".", "_");
                    }
                    // the test group is its test set
                    // (dots are problematic since Jenkins parses them as separators between package and class)
                    activeTestDesc.TestGroup = foldername + "\\" + targetTestSet.Name;
                    activeTestDesc.TestGroup = activeTestDesc.TestGroup.Replace(".", "_");
                }
                TestState enmState = GetTsStateFromQcState(testExecStatusObj.Status as string);
                string statusString = enmState.ToString();
                if (enmState == TestState.Running)
                {
                    ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerStat, activeTestDesc.TestName, testExecStatusObj.TSTestId, statusString));
                }
                else if (enmState != TestState.Waiting)
                {
                    ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerStatWithMessage, activeTestDesc.TestName, testExecStatusObj.TSTestId, statusString, testExecStatusObj.Message));
                }
                if (System.IO.File.Exists(abortFilename))
                {
                    break;
                }
            }
        }
        // wait 0.2 seconds
        Thread.Sleep(200);
        // check for abortion
        if (System.IO.File.Exists(abortFilename))
        {
            _blnRunCancelled = true;
            ConsoleWriter.WriteLine(Resources.GeneralStopAborted);
            // stop all test instances in this testSet
            Scheduler.Stop(currentTestSetInstances);
            ConsoleWriter.WriteLine(Resources.GeneralAbortedByUser);
            // stop working
            Environment.Exit((int)Launcher.ExitCodeEnum.Aborted);
        }
    }
    // check status for each test
    if (timeout == -1 || sw.Elapsed.TotalSeconds < timeout)
    {
        // close last test
        if (prevTest != null)
        {
            WriteTestRunSummary(prevTest);
        }
        // done with all tests, stop collecting output in the testRun object
        ConsoleWriter.ActiveTestRun = null;
        for (int k = 1; k <= executionStatus.Count; ++k)
        {
            if (System.IO.File.Exists(abortFilename))
            {
                break;
            }
            TestExecStatus testExecStatusObj = executionStatus[k];
            activeTestDesc = UpdateTestStatus(runDesc, targetTestSet, testExecStatusObj, false);
            UpdateCounters(activeTestDesc, runDesc);
            currentTest = targetTestSet.TSTestFactory[testExecStatusObj.TSTestId];
            string testPath = "Root\\" + tsFolderName + "\\" + tsName + "\\" + activeTestDesc.TestName;
            activeTestDesc.TestPath = testPath;
        }
        // update the total runtime
        runDesc.TotalRunTime = sw.Elapsed;
        ConsoleWriter.WriteLine(string.Format(Resources.AlmRunnerTestsetDone, tsName, DateTime.Now.ToString(Launcher.DateFormat)));
    }
    else
    {
        // timed out
        _blnRunCancelled = true;
        ConsoleWriter.WriteLine(Resources.GeneralTimedOut);
        Launcher.ExitCode = Launcher.ExitCodeEnum.Aborted;
    }
    return(runDesc);
}
/// <summary>
/// Connects to ALM with the credentials entered in the UI, walks every test set
/// matching the folder/name fields, and for each test instance that has never
/// been run creates a new run pre-populated with steps copied from the test's
/// design steps. Errors are shown in message boxes; the connection is always
/// released.
/// </summary>
private void Button_Click_1(object sender, RoutedEventArgs e)
{
    TDConnection tdConn = null;
    try
    {
        tdConn = new TDConnection();
        tdConn.InitConnectionEx(qcUrl.Text);
        tdConn.ConnectProjectEx(qcDomain.Text, qcProject.Text, qcLogin.Text, qcPassword.Password);
        TestSetTreeManager tsTreeMgr = (TestSetTreeManager)tdConn.TestSetTreeManager;
        TestSetFolder tsFolder = (TestSetFolder)tsTreeMgr.get_NodeByPath(testFolder.Text);
        TDAPIOLELib.List tsList = (TDAPIOLELib.List)tsFolder.FindTestSets((string)testSet.Text, false, null);
        foreach (TestSet testset in tsList)
        {
            Console.WriteLine("Test Set Folder Path: {0}", testFolder);
            Console.WriteLine("Test Set: {0}", testset.Name);
            TSTestFactory tsTestFactory = (TSTestFactory)testset.TSTestFactory;
            TDAPIOLELib.List tsTestList = tsTestFactory.NewList("");
            foreach (TSTest tsTest in tsTestList)
            {
                Console.WriteLine("Test Set: {0}", tsTest.Name);
                string status = (string)tsTest.Status;
                Console.WriteLine("STATUS {0}", status);
                Console.WriteLine("PARAMS {0}", tsTest.History);
                TDAPIOLELib.Run lastRun = (TDAPIOLELib.Run)tsTest.LastRun;
                // don't update the test if it may have been modified by someone else
                if (lastRun == null)
                {
                    RunFactory runFactory = (RunFactory)tsTest.RunFactory;
                    TDAPIOLELib.List runs = runFactory.NewList("");
                    Console.WriteLine("test runs: {0}", runs.Count);
                    string date = DateTime.Now.ToString("MM-dd_hh-mm-ss");
                    TDAPIOLELib.Run run = (TDAPIOLELib.Run)runFactory.AddItem("Run_" + date);
                    // copy each design step of the underlying test into the new run
                    var oTest = (Test)tsTest.Test;
                    var tsDesignStepList = oTest.DesignStepFactory.NewList("");
                    var oStepFactory = (StepFactory)run.StepFactory;
                    foreach (DesignStep oDesignStep in tsDesignStepList)
                    {
                        oStepFactory.AddItem(oDesignStep.StepName);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        MessageBox.Show(ex.StackTrace);
    }
    finally
    {
        // Always release the ALM session, even after a failure was shown above.
        if (tdConn != null && tdConn.Connected)
        {
            tdConn.DisconnectProject();
            tdConn.Disconnect();
        }
    }
}
/// <summary>
/// Adds a specific test configuration to a test set.
/// The OTA API cannot add a single configuration directly, so the owning test is
/// added (which creates an instance per configuration) and the instances for the
/// configurations we did not ask for are then removed.
/// </summary>
/// <param name="TestSetId">ID of the target test set.</param>
/// <param name="TestConfigId">ID of the test configuration to add.</param>
/// <param name="Additional">Optional "fieldName;;value" pairs to set on the new instance.</param>
/// <param name="RemoveTestOnUpdateFail">When true, removes the added instance if a COM error occurs while setting fields.</param>
/// <returns>The ID of the added test instance, or 0 on failure.</returns>
public int AddTest(
    int TestSetId,
    int TestConfigId,
    string[] Additional = default(string[]),
    bool RemoveTestOnUpdateFail = default(bool))
{
    int result = 0;
    try
    {
        if (!Connect(ServerUrl, Username, Password, Domain, Project))
        {
            return 0;
        }
        // Resolve the owning test from the configuration.
        TestConfigFactory TestConfigFact = tdc.TestConfigFactory;
        TestConfig TestConfig = TestConfigFact[TestConfigId];
        int TestId = TestConfig.TestId;
        TestSetFactory TSFact = tdc.TestSetFactory;
        TestSet TestSet = TSFact[TestSetId];
        TSTestFactory TSTestFact = TestSet.TSTestFactory;
        // Snapshot the instance IDs before the add so we can tell which are new.
        List StartingTestList = TSTestFact.NewList("");
        System.Collections.Generic.List<int> StartingTestInstanceList = new List<int>();
        foreach (TSTest testInstance in StartingTestList)
        {
            StartingTestInstanceList.Add(int.Parse(testInstance.ID));
        }
        // Add the test to the test set (creates one instance per configuration).
        TSTestFact.AddItem(TestId);
        List EndingTestList = TSTestFact.NewList("");
        System.Collections.Generic.List<int> EndingTestInstanceList = new List<int>();
        foreach (TSTest testInstance in EndingTestList)
        {
            EndingTestInstanceList.Add(int.Parse(testInstance.ID));
        }
        // Remove the newly created instances whose configuration is not the one requested.
        TSTest tempInstance, addedTestInstance = null;
        TestConfig tempConfig;
        foreach (int testInstanceId in EndingTestInstanceList)
        {
            if (!StartingTestInstanceList.Contains(testInstanceId))
            {
                tempInstance = TSTestFact[testInstanceId];
                tempConfig = tempInstance.TestConfiguration;
                if (!TestConfigId.Equals(tempConfig.ID))
                {
                    TSTestFact.RemoveItem(tempInstance.ID);
                }
                else
                {
                    addedTestInstance = tempInstance;
                }
            }
        }
        // BUG FIX: guard against the requested configuration never appearing.
        // The original dereferenced addedTestInstance unconditionally, so a
        // NullReferenceException escaped the COMException handler below and
        // skipped Disconnect().
        if (addedTestInstance != null)
        {
            result = int.Parse(addedTestInstance.ID);
            // Set additional field values ("field;;value" pairs).
            if (Additional != default(string[]))
            {
                foreach (string fieldPair in Additional)
                {
                    string[] tempFieldArray = fieldPair.Split(new[] { ";;" }, StringSplitOptions.None);
                    addedTestInstance[tempFieldArray[0]] = tempFieldArray[1];
                }
                addedTestInstance.Post();
            }
        }
        else
        {
            rr.AddErrorLine("Test configuration " + TestConfigId + " was not added to test set " + TestSetId + ".");
        }
    }
    catch (COMException ce)
    {
        rr.AddErrorLine(HandleException(ce));
        // Roll back the partially-configured instance if requested.
        if (RemoveTestOnUpdateFail && result > 0)
        {
            TSTestFactory fact = tdc.TSTestFactory;
            fact.RemoveItem(result);
            result = 0;
        }
    }
    Disconnect();
    return result;
}
/// <summary>
/// IRunTest entry point: runs every test instance in <paramref name="RunSet"/>.
/// For each instance it creates a new Run, downloads the configured test-case
/// attachments (script ".cod" / data ".iod" files), executes the script via
/// ScriptRunner, maps the result onto the QC run status, posts pass/total
/// summary fields on the instance and uploads the converted XML log as a run
/// attachment.
/// </summary>
/// <param name="tdConn">Open Quality Center connection.</param>
/// <param name="RunSet">Test set whose instances are executed.</param>
/// <param name="configPath">Path of the test configuration file.</param>
/// <returns>Always true — errors are logged, not propagated.
/// NOTE(review): true is also returned when the configuration fails to load — confirm intended.</returns>
public bool Run(TDConnectionClass tdConn, TestSet RunSet, string configPath)
{
    try
    {
        AutoLog.Info("[CP Run]Test run from IRunTest: " + RunSet.Name);
        TestConfigFile tConfig = new TestConfigFile();
        bool success = false;
        success = TestUtility.LoadConfigFile(ref tConfig, configPath);
        if (!success)
        {
            AutoLog.Info("[CP Run]Load Configuration file failed.");
            return(true);
        }
        // invoke the test execution
        TSTestFactory TSTestFact = RunSet.TSTestFactory as TSTestFactory;
        List runList = new List();
        runList = TSTestFact.NewList("") as List;
        foreach (TSTest instance in runList)
        {
            // generate the run first
            RunFactory runFact = instance.RunFactory as RunFactory;
            DateTime now = DateTime.Now;
            Run instanceRun = runFact.AddItem("Run_" + now.ToShortDateString() + "_" + now.ToShortTimeString()) as Run;
            QCOperation.QCInformation info = new QCOperation.QCInformation();
            // Initial start status; updated again after the script finishes.
            info.SetTestRunStatus(tdConn, instanceRun.ID.ToString(), "Not Completed");
            // Download test case attachments to the local temp folder.
            string caseID = instance.TestId as string;
            ArrayList attachments = new ArrayList();
            QCOperation.QCAttachment taa = new QCOperation.QCAttachment();
            foreach (string downfile in tConfig.RunParameter.DownloadFile)
            {
                attachments.Add(taa.DownloadAttachment(tdConn, "TEST", caseID, downfile, @"C:\CSAutoTest\Temp"));
            }
            // When the test finishes, the summary is recorded on this instance.
            string instanceID = instance.ID as string;
            string scriptFilename = null;
            string dataFilename = null;
            // Pick the script (*.cod) and data (*.iod) files out of the downloads.
            for (int i = 0; i < attachments.Count; i++)
            {
                ArrayList downList = attachments[i] as ArrayList;
                if (downList.Count > 0)
                {
                    foreach (Object fileObj in downList)
                    {
                        string tempFilePath = fileObj as string;
                        if (tempFilePath != null && tempFilePath.EndsWith("cod"))
                        {
                            scriptFilename = tempFilePath;
                        }
                        if (tempFilePath != null && tempFilePath.EndsWith("iod"))
                        {
                            dataFilename = tempFilePath;
                        }
                    }
                }
            }
            if (tConfig.RunParameter.RunRemote)
            {
                // Rewrite paths inside the data file for remote execution.
                dataFilename = ReplaceStringInFile(dataFilename, tConfig.RunParameter.RemotePath);
            }
            if (scriptFilename != null)
            {
                PAS.AutoTest.ScriptRunner.ScriptRunner sr = new PAS.AutoTest.ScriptRunner.ScriptRunner();
                PAS.AutoTest.ScriptRunner.ExecuteResult er;
                AutoLog.Info("[CP Run]Script file name: " + scriptFilename);
                AutoLog.Info("[CP Run]Data file name: " + dataFilename);
                // 1200 = execution timeout passed to ScriptRunner.Run — units not
                // visible here; confirm against ScriptRunner's documentation.
                if (dataFilename != null)
                {
                    er = sr.Run(scriptFilename, dataFilename, 1200);
                }
                else
                {
                    er = sr.Run(scriptFilename, string.Empty, 1200);
                }
                // Map the script result onto the QC run status.
                switch (er.Result)
                {
                    case PAS.AutoTest.TestData.TestResult.Done:
                        info.SetTestRunStatus(tdConn, instanceRun.ID.ToString(), "Passed");
                        break;
                    case PAS.AutoTest.TestData.TestResult.Pass:
                        info.SetTestRunStatus(tdConn, instanceRun.ID.ToString(), "Passed");
                        break;
                    case PAS.AutoTest.TestData.TestResult.Fail:
                        info.SetTestRunStatus(tdConn, instanceRun.ID.ToString(), "Failed");
                        break;
                    case PAS.AutoTest.TestData.TestResult.Incomplete:
                        info.SetTestRunStatus(tdConn, instanceRun.ID.ToString(), "Not Completed");
                        break;
                    case PAS.AutoTest.TestData.TestResult.Warning:
                        info.SetTestRunStatus(tdConn, instanceRun.ID.ToString(), "N/A");
                        break;
                }
                if (er.Result != PAS.AutoTest.TestData.TestResult.Incomplete)
                {
                    // Post pass/total counters onto the test instance.
                    info.SetTestInstanceSummary(tdConn, instanceID, tConfig.RunParameter.NumOfPassed, er.Output.Summary.Passed.ToString());
                    info.SetTestInstanceSummary(tdConn, instanceID, tConfig.RunParameter.NumOfTotal, er.Output.Summary.TotalRun.ToString());
                    // Convert the output to XML and attach it to the run.
                    string fileLog = ".log";
                    fileLog = "TEST_" + caseID + "_" + now.ToShortDateString() + "_" + now.ToShortTimeString() + ".log";
                    fileLog = ConvertChar(fileLog);
                    er.Output.ConvertToXml(".\\Temp\\" + fileLog);
                    bool uploadSuccess = false;
                    uploadSuccess = taa.UploadAttachment(tdConn, "RUN", instanceRun.ID.ToString(), fileLog, GetCurrentRunDir() + "\\Temp\\");
                    if (uploadSuccess)
                    {
                        AutoLog.Info("[CP Run]Upload test log success!");
                    }
                    else
                    {
                        AutoLog.Info("[CP Run]Upload test log fail!");
                    }
                }
                else
                {
                    AutoLog.Info("[CP Run]Case run status is incomplete!");
                }
            }
        }
    }
    catch (Exception e)
    {
        AutoLog.Info("[CP Run]Run test case exception: " + e.Message);
    }
    return(true);
}
/// <summary>
/// Records a run (with steps and optional attachments) for a named test instance
/// inside a test set located under <paramref name="TestSetFolderPath"/>.
/// If a run named <paramref name="runName"/> already exists, only the steps are
/// appended to it; otherwise a new run is created, posted, and the attachments
/// and steps are added.
/// </summary>
/// <param name="TestSetFolderPath">ALM path of the folder containing the test set.</param>
/// <param name="testsetName">Name of the test set (matched via CY_CYCLE).</param>
/// <param name="testName">Name of the test instance (matched via TS_NAME).</param>
/// <param name="runName">Run name to create or update.</param>
/// <param name="status">Status written to a newly created run.</param>
/// <param name="_attachmentPath">Folder of the main attachment (skipped if the file does not exist).</param>
/// <param name="_attachmentName">File name of the main attachment.</param>
/// <param name="installogPath">Folder of the install log (skipped if the file does not exist).</param>
/// <param name="installlogName">File name of the install log.</param>
/// <param name="steps">Run steps to add to the run.</param>
/// <returns>true when the run was created or updated.</returns>
public bool Add(string TestSetFolderPath, string testsetName, string testName, string runName, string status, string _attachmentPath, string _attachmentName, string installogPath, string installlogName, List<RunSteps> steps)
{
    TestSetTreeManager tsTreeMgr = tdconn.TestSetTreeManager as TestSetTreeManager;
    // (The original also fetched tsTreeMgr.Root first and immediately overwrote it — dead assignment removed.)
    TestSetFolder tSetFolder = tsTreeMgr.get_NodeByPath(TestSetFolderPath) as TestSetFolder;
    TestSetFactory TestSetFact = tSetFolder.TestSetFactory as TestSetFactory;
    TDFilter TestFilter = TestSetFact.Filter as TDFilter;
    TestFilter["CY_CYCLE"] = "'" + testsetName + "'";
    List lst = TestSetFact.NewList(TestFilter.Text);
    // OTA lists are 1-based; take the first (expected only) matching set.
    TestSet ts = lst[1] as TestSet;
    TestFilter.Clear();
    TSTestFactory tsF = ts.TSTestFactory as TSTestFactory;
    TestFilter = tsF.Filter as TDFilter;
    TestFilter["TS_NAME"] = "'" + testName + "'";
    TSTest TestInstance = tsF.NewList(TestFilter.Text)[1] as TSTest;
    RunFactory rf = TestInstance.RunFactory as RunFactory;
    List runlist = rf.NewList("");
    Run therun;
    // If the run already exists, just append the steps to it.
    for (int index = 1; index <= runlist.Count; index++)
    {
        therun = runlist[index] as Run;
        if (therun.Name == runName)
        {
            foreach (RunSteps runstep in steps)
            {
                runstep.Add(therun);
            }
            return true;
        }
    }
    // Otherwise create and post a new run.
    therun = rf.AddItem(runName) as Run;
    therun.Status = status;
    therun.Post();
    // Attach the result file and the install log when they exist on disk.
    // (The two byte-identical inline upload sections were factored into AddRunAttachment.)
    if (File.Exists(_attachmentPath + "\\" + _attachmentName))
    {
        AddRunAttachment(therun, _attachmentPath, _attachmentName);
    }
    if (File.Exists(installogPath + "\\" + installlogName))
    {
        AddRunAttachment(therun, installogPath, installlogName);
    }
    therun.AutoPost = true;
    int count = 0;
    foreach (RunSteps runstep in steps)
    {
        ++count;
        Console.WriteLine("Uploading step " + count + ":" + runstep.StepName);
        runstep.Add(therun);
    }
    return true;
}

/// <summary>
/// Uploads a single file from <paramref name="folderPath"/> as an attachment on <paramref name="therun"/>.
/// </summary>
/// <param name="therun">Run that receives the attachment.</param>
/// <param name="folderPath">Local folder containing the file.</param>
/// <param name="fileName">File name to upload.</param>
private static void AddRunAttachment(Run therun, string folderPath, string fileName)
{
    AttachmentFactory attachFact = therun.Attachments as AttachmentFactory;
    Attachment attachfile = attachFact.AddItem(fileName) as Attachment;
    attachfile.AutoPost = true;
    IExtendedStorage extStor = attachFact.AttachmentStorage as IExtendedStorage;
    extStor.ClientPath = folderPath;
    extStor.Save(fileName, true);
    attachfile.Post();
}
/// <summary>
/// Pushes the outcome of <paramref name="currentTestCase"/> into ALM: finds the
/// matching test instance in the test set named after the case's Category (under
/// the QC folder from ExecutionSession), creates a run with copied design steps,
/// sets Passed/Failed, then replaces the instance's attachments with the HTML
/// report and the screenshots.
/// </summary>
/// <param name="currentTestCase">Executed test case whose result is uploaded.</param>
public void UploadResults(TestCase currentTestCase)
{
    string testFolder = Convert.ToString(ExecutionSession.dictCommonData["QCFolder"]);
    string testSetName = currentTestCase.Category;
    TestSetTreeManager tsTreeMgr = (TestSetTreeManager)qcConnect.TestSetTreeManager;
    TestSetFolder tsFolder = (TestSetFolder)tsTreeMgr.get_NodeByPath(testFolder);
    List tsList = tsFolder.FindTestSets(testSetName, false, null);
    // OTA lists are 1-based; the first matching test set is used.
    TestSet testSet = tsList[1];
    TSTestFactory tsTestFactory = (TSTestFactory)testSet.TSTestFactory;
    List tsTestList = tsTestFactory.NewList("");
    foreach (TSTest tsTest in tsTestList)
    {
        // Instance names carry a 3-character prefix that is stripped before
        // comparing — TODO confirm the prefix is always exactly 3 characters.
        if (currentTestCase.TestCaseName == tsTest.Name.Remove(0, 3))
        {
            RunFactory runFactory = (RunFactory)tsTest.RunFactory;
            string runName = runFactory.UniqueRunName;
            Run theRun = runFactory.AddItem(runName);
            theRun.Name = runName;
            theRun.CopyDesignSteps();
            theRun.Status = currentTestCase.OverAllResult == OverAllResult.PASS ? "Passed" : "Failed";
            theRun.Post();
            // Delete the instance's current attachments: remove the first item
            // once per attachment; failures are ignored (best-effort cleanup).
            AttachmentFactory objAttachmentFactory = tsTest.Attachments;
            var objCurrentAttachments = objAttachmentFactory.NewList("");
            for (int objC = 1; objC <= objCurrentAttachments.Count; objC++)
            {
                try
                {
                    objAttachmentFactory.RemoveItem(tsTest.Attachments.NewList("").Item(1).ID);
                }
                catch
                {
                    // best effort — continue removing the rest
                }
            }
            // Upload the HTML report…
            IAttachment objAttachment = objAttachmentFactory.AddItem(DBNull.Value);
            objAttachment.FileName = currentTestCase.QCHTMLReportPath;
            objAttachment.Type = 1;
            objAttachment.Post();
            // …and every screenshot taken during the run.
            string[] filePaths = System.IO.Directory.GetFiles(currentTestCase.QCScreenShotPath);
            foreach (string file in filePaths)
            {
                objAttachment = objAttachmentFactory.AddItem(DBNull.Value);
                objAttachment.FileName = file;
                objAttachment.Type = 1;
                objAttachment.Post();
            }
            break;
        }
    }
}
/// <summary>
/// Uploads the execution result of a TB2 <paramref name="currentTestCase"/> to QC/ALM:
/// finds the test instance (matched by name, ignoring its 3-character prefix) in the
/// test set named by the test case's Category under a fixed folder path, creates and
/// posts a new run with Passed/Failed status, then replaces the instance's attachments
/// with the latest HTML report and all screenshot files.
/// </summary>
/// <param name="currentTestCase">Executed TB2 test case carrying the overall result,
/// the HTML report path and the screenshot folder path.</param>
public void UploadResults(TB2.TestCase currentTestCase)
{
    // TODO(review): hard-coded personal/environment path — should come from
    // configuration (e.g. ExecutionSession.dictCommonData) like the TestCase overload.
    string testFolder = @"Root\WCS 7up Core - 5022\zz Automation\PracticeExecution\Temp_Prashant\QA72_7_31";
    string testSetName = currentTestCase.Category;

    TestSetTreeManager tsTreeMgr = (TestSetTreeManager)qcConnect.TestSetTreeManager;
    TestSetFolder tsFolder = (TestSetFolder)tsTreeMgr.get_NodeByPath(testFolder);

    // OTA collections are 1-based; take the first test set matching the name.
    List tsList = tsFolder.FindTestSets(testSetName, false, null);
    TestSet testSet = tsList[1];

    TSTestFactory tsTestFactory = (TSTestFactory)testSet.TSTestFactory;
    List tsTestList = tsTestFactory.NewList("");

    foreach (TSTest tsTest in tsTestList)
    {
        // Instance names carry a 3-character prefix before the real test name.
        if (currentTestCase.TestCaseName != tsTest.Name.Remove(0, 3))
        {
            continue;
        }

        // Create a new run; CopyDesignSteps pulls the design steps into the run.
        RunFactory runFactory = (RunFactory)tsTest.RunFactory;
        string runName = runFactory.UniqueRunName;
        Run theRun = runFactory.AddItem(runName);
        theRun.Name = runName;
        theRun.CopyDesignSteps();
        theRun.Status = (currentTestCase.OverAllResult == OverAllResult.PASS) ? "Passed" : "Failed";
        theRun.Post();

        // Delete the instance's current attachments, repeating the sweep until the
        // list is empty (replaces the original goto-label loop). The attachment list
        // re-indexes after each removal, so always remove the first remaining item.
        AttachmentFactory attachmentFactory = tsTest.Attachments;
        do
        {
            var currentAttachments = attachmentFactory.NewList("");
            for (int i = 1; i <= currentAttachments.Count; i++)
            {
                try
                {
                    attachmentFactory.RemoveItem(tsTest.Attachments.NewList("").Item(1).ID);
                }
                catch
                {
                    // Best effort: a stale or locked attachment must not block the upload.
                }
            }
        } while (attachmentFactory.NewList("").Count > 0);

        // Attach the HTML report; Type 1 = file attachment in the OTA API.
        IAttachment attachment = attachmentFactory.AddItem(DBNull.Value);
        attachment.FileName = currentTestCase.HTMLReportPath;
        attachment.Type = 1;
        attachment.Post();

        // Attach every screenshot from the test case's screenshot folder.
        foreach (string file in System.IO.Directory.GetFiles(currentTestCase.ScreenShotPath))
        {
            attachment = attachmentFactory.AddItem(DBNull.Value);
            attachment.FileName = file;
            attachment.Type = 1;
            attachment.Post();
        }

        break; // Only one instance matches; stop scanning.
    }
}
/// <summary>
/// Exports a BusinessFlow to QC/ALM as a Test Set. If no mapped Test Set exists, a new
/// one is created under <paramref name="uploadPath"/>; otherwise the existing set is
/// synced (test instances no longer mapped to an ActivitiesGroup are removed). The
/// requested item fields are written, the set is posted (with recovery logic if ALM
/// reports a duplicate or the post throws after the set was actually created), and a
/// test instance is added for each ActivitiesGroup not already present in the set.
/// </summary>
/// <param name="businessFlow">Flow to export; its ExternalID is set to the Test Set ID.</param>
/// <param name="mappedTestSet">Existing QC Test Set to update, or null to create a new one.</param>
/// <param name="uploadPath">QC tree path under which a new Test Set is created / searched.</param>
/// <param name="testSetFields">Item fields; only those flagged ToUpdate or Mandatory are written.</param>
/// <param name="result">Receives a user-facing error message on failure.</param>
/// <returns>true on success; false on a handled failure (details placed in <paramref name="result"/>).</returns>
public static bool ExportBusinessFlowToQC(BusinessFlow businessFlow, TestSet mappedTestSet, string uploadPath, ObservableList<ExternalItemFieldBase> testSetFields, ref string result)
{
    TestSet testSet;
    ObservableList<ActivitiesGroup> existingActivitiesGroups = new ObservableList<ActivitiesGroup>();
    try
    {
        if (mappedTestSet == null)
        {
            //##create new Test Set in QC
            TestSetFactory TestSetF = (TestSetFactory)mTDConn.TestSetFactory;
            testSet = (TestSet)TestSetF.AddItem(System.DBNull.Value);
            //set the upload path
            TestSetTreeManager treeM = (TestSetTreeManager)mTDConn.TestSetTreeManager;
            ISysTreeNode testSetParentFolder = (ISysTreeNode)treeM.get_NodeByPath(uploadPath);
            testSet.TestSetFolder = testSetParentFolder.NodeID;
        }
        else
        {
            //##update existing test set
            //testSet = mappedTestSet;
            // Re-fetch the set by ID so we hold a live OTA object, not the mapped snapshot.
            testSet = ImportFromQC.GetQCTestSet(mappedTestSet.ID.ToString());
            TSTestFactory testsF = (TSTestFactory)testSet.TSTestFactory;
            List tsTestsList = testsF.NewList("");
            // Remove test instances that no longer map to any ActivitiesGroup (matched by
            // ExternalID = QC test ID and ExternalID2 = instance ID); remember groups that
            // are still present so they are not re-added below.
            foreach (TSTest tsTest in tsTestsList)
            {
                ActivitiesGroup ag = businessFlow.ActivitiesGroups.Where(x => (x.ExternalID == tsTest.TestId.ToString() && x.ExternalID2 == tsTest.ID.ToString())).FirstOrDefault();
                if (ag == null)
                {
                    testsF.RemoveItem(tsTest.ID);
                }
                else
                {
                    existingActivitiesGroups.Add(ag);
                }
            }
        }
        //set item fields
        foreach (ExternalItemFieldBase field in testSetFields)
        {
            if (field.ToUpdate || field.Mandatory)
            {
                if (string.IsNullOrEmpty(field.SelectedValue) == false && field.SelectedValue != "NA")
                {
                    testSet[field.ID] = field.SelectedValue;
                }
                else
                {
                    // Fall back to "NA"; swallow failures for fields that reject that value.
                    try
                    {
                        testSet[field.ID] = "NA";
                    }
                    catch { }
                }
            }
        }
        //post the test set
        testSet.Name = businessFlow.Name;
        try
        {
            testSet.Post();
        }
        catch (Exception ex)
        {
            // A duplicate-name rejection is a hard failure the user must resolve.
            if (ex.Message.Contains("The Test Set already exists"))
            {
                result = "Cannot export " + GingerDicser.GetTermResValue(eTermResKey.BusinessFlow) + "- The Test Set already exists in the selected folder. ";
                Reporter.ToLog(eLogLevel.ERROR, result, ex);
                return (false);
            }
            //Searching for the testset in case it was created in ALM although getting exception
            TestSetFactory TSetFact = mTDConn.TestSetFactory;
            TDFilter tsFilter = TSetFact.Filter;
            TestSetTreeManager treeM = (TestSetTreeManager)mTDConn.TestSetTreeManager;
            ISysTreeNode testSetParentFolder = (ISysTreeNode)treeM.get_NodeByPath(uploadPath);
            // Prefer filtering by folder ID; some ALM versions reject it, so fall back
            // to a quoted folder path filter.
            try
            {
                tsFilter["CY_FOLDER_ID"] = "" + testSetParentFolder.NodeID + "";
            }
            catch (Exception e)
            {
                tsFilter["CY_FOLDER_ID"] = "\"" + testSetParentFolder.Path.ToString() + "\"";
                Reporter.ToLog(eLogLevel.ERROR, $"Method - {MethodBase.GetCurrentMethod().Name}, Error - {e.Message}", e);
            }
            // Adopt the set ALM actually created, matched by name within the folder.
            List TestsetList = TSetFact.NewList(tsFilter.Text);
            foreach (TestSet set in TestsetList)
            {
                if (set.Name == businessFlow.Name)
                {
                    testSet = set;
                    break;
                }
            }
        }
        businessFlow.ExternalID = testSet.ID.ToString();
        //Add missing test cases
        TSTestFactory testCasesF = testSet.TSTestFactory;
        foreach (ActivitiesGroup ag in businessFlow.ActivitiesGroups)
        {
            if (existingActivitiesGroups.Contains(ag) == false && string.IsNullOrEmpty(ag.ExternalID) == false && ImportFromQC.GetQCTest(ag.ExternalID) != null)
            {
                // Group maps to a QC test but has no instance yet — add one.
                TSTest tsTest = testCasesF.AddItem(ag.ExternalID);
                if (tsTest != null)
                {
                    ag.ExternalID2 = tsTest.ID; //the test case instance ID in the test set- used for exporting the execution details
                }
            }
            else
            {
                // Group already present (or not addable as an instance) — export its
                // activities as test steps on the mapped QC test instead.
                foreach (ActivityIdentifiers actIdent in ag.ActivitiesIdentifiers)
                {
                    ExportActivityAsTestStep(ImportFromQC.GetQCTest(ag.ExternalID), (Activity)actIdent.IdentifiedActivity);
                }
            }
        }
        return (true);
    }
    catch (Exception ex)
    {
        result = "Unexpected error occurred- " + ex.Message;
        Reporter.ToLog(eLogLevel.ERROR, "Failed to export the " + GingerDicser.GetTermResValue(eTermResKey.BusinessFlow) + " to QC/ALM", ex);
        return (false);
    }
}
public int Create( int TestSetId, int TestConfigId, TestStatus Status, string RunName, bool DraftRun, string[] Additional = default(string[])) { int runId = 0; try { if (!Connect(ServerUrl, Username, Password, Domain, Project)) { return(0); } if (!CanPerformAction("ac_run_manual_test")) { rr.AddErrorLine("Error: The user does not have permission to execute tests"); return(0); } string runStatus = NormalizeTestStatus(Status); TestSetFactory tsFact = tdc.TestSetFactory; TestSet targetTestSet = tsFact[TestSetId]; TSTestFactory tsTestFactory = targetTestSet.TSTestFactory; TDFilter filter = tsTestFactory.Filter; filter["TC_TEST_CONFIG_ID"] = TestConfigId.ToString(); List testInstanceList = filter.NewList(); if (testInstanceList.Count == 1) { TSTest testInstance = testInstanceList[1]; RunFactory runFact = testInstance.RunFactory; //Best practice is to provide a null value, but an ALM bug keeps the test status of the test instance unchanged unless a name is provided Run testRun = runFact.AddItem(RunName + "_" + DateTime.Now); testRun.Status = runStatus; if (DraftRun) { testRun["RN_DRAFT"] = "Y"; } //Set additional field values if (Additional != default(string[])) { foreach (string fieldPair in Additional) { string[] tempFieldArray = fieldPair.Split(new[] { ";;" }, StringSplitOptions.None); testRun[tempFieldArray[0]] = tempFieldArray[1]; } } testRun.Post(); runId = testRun.ID; //Console.Out.WriteLine(runId); } else if (testInstanceList.Count == 0) { rr.AddErrorLine("Error: The test configuration ID does not exist in the test set."); } else { //More than one instace of the test configuration exists in the test set //The integration cannot support duplicates rr.AddErrorLine("Error: multiple instances of the test configuration exist in this test set."); } } catch (COMException ce) { rr.AddErrorLine(HandleException(ce)); } finally { Disconnect(); } return(runId); }