/// <summary>
/// Creates a suite node for the given test, recording its fully qualified
/// name and a reference back to the owning assembly suite.
/// </summary>
public NUnitTestSuite(NUnitAssemblyTestSuite rootSuite, TestInfo tinfo) : base(tinfo.Name)
{
    // Prefix with the path only when one is present; otherwise the plain
    // name is already fully qualified.
    if (tinfo.PathName != null && tinfo.PathName.Length > 0)
        fullName = tinfo.PathName + "." + tinfo.Name;
    else
        fullName = tinfo.Name;

    this.testInfo = tinfo;
    this.rootSuite = rootSuite;
}
/// <summary>
/// OK-button handler for the "create test" form: validates the validation
/// group, builds a TestInfo from the form fields, inserts it via
/// TestController and redirects back to the test list on success.
/// On validation failure, moves focus to the first invalid control.
/// </summary>
protected void CreateTest_OkButtonClicked(object sender, EventArgs e)
{
    Page.Validate("create_test");
    if (Page.IsValid)
    {
        // Only insert when the page is bound to a real class.
        if (this.ClassId > 0)
        {
            TestInfo entity = new TestInfo();
            entity.ClassId = this.ClassId;
            entity.Title = TitleTextBox.Text;
            entity.MinPassingScorePercentage = Convert.ToInt32(PassingPercentageTextBox.Text);
            entity.Description = DescriptionTextBox.Value;
            entity.Visible = VisibleCheckBox.Checked;
            // Capture a single timestamp so Created and Updated are
            // guaranteed equal on insert (two DateTime.Now calls can differ).
            DateTime now = DateTime.Now;
            entity.CreatedTimestamp = now;
            entity.UpdatedTimestamp = now;
            bool result = TestController.InsertTest(ref entity);
            if (result)
            {
                DataBind();
                OnTestCreated(EventArgs.Empty);
                // Redirect ends processing of the current request; the
                // previous `entity = null` before it was dead code and
                // has been removed.
                Response.Redirect("Tests.aspx?ClassId=" + this.ClassId);
            }
        }
    }
    else
    {
        ValidationHelper.SetFocusToFirstError(this.Page, "create_test");
    }
}
/// <summary>
/// Creates a test-case node for the given test, deriving the class name and
/// fully qualified name from the test's path.
/// </summary>
public NUnitTestCase(NUnitAssemblyTestSuite rootSuite, TestInfo tinfo) : base(tinfo.Name)
{
    className = tinfo.PathName;
    fullName = className + "." + tinfo.Name;
    this.rootSuite = rootSuite;
}
/// <summary>
/// Runs the test in a freshly created engine (engine-level isolation) and
/// returns the result code.
/// </summary>
private int GetEngineTest(TestInfo testcase)
{
    var engine = CreateEngine(testcase.Options);
    var source = engine.CreateScriptSourceFromString(
        testcase.Text,
        testcase.Path,
        SourceCodeKind.File);

    return GetResult(engine, source);
}
/// <summary>
/// Builds the fixtures used by each test: a dispatcher with an attached
/// catcher, plus a canned test, result and exception.
/// </summary>
public void SetUp()
{
    exception = new Exception(MESSAGE);
    dispatcher = new TestEventDispatcher();
    catcher = new TestEventCatcher(dispatcher);
    test = new TestInfo(new TestSuite(TESTNAME));
    result = new TestResult(test);
}
/// <summary>
/// Dispatches the test to the runner matching its isolation level.
/// </summary>
/// <param name="testcase">Test to run; its Options.IsolationLevel selects the runner.</param>
/// <returns>The test result code.</returns>
/// <exception cref="ArgumentException">Thrown for unsupported isolation levels.</exception>
public int RunTest(TestInfo testcase)
{
    switch (testcase.Options.IsolationLevel)
    {
        case TestIsolationLevel.DEFAULT:
            return GetScopeTest(testcase);
        case TestIsolationLevel.ENGINE:
            return GetEngineTest(testcase);
        default:
            // paramName must name the offending parameter itself
            // ("testcase"), not a property path on it. String.Format boxes
            // the enum and calls ToString itself, so the explicit call was
            // redundant.
            throw new ArgumentException(
                String.Format("IsolationLevel {0} is not supported.", testcase.Options.IsolationLevel),
                "testcase");
    }
}
/// <summary>
/// Builds a suite containing the mock fixture, then snapshots each level
/// (suite, fixture, first test case) as a TestInfo.
/// </summary>
public void SetUp()
{
    testSuite = new TestSuite("MyTestSuite");
    testFixture = TestFixtureBuilder.BuildFrom(typeof(MockTestFixture));
    testSuite.Add(testFixture);
    testCase = (NUnit.Core.Test)testFixture.Tests[0];

    suiteInfo = new TestInfo(testSuite);
    fixtureInfo = new TestInfo(testFixture);
    testCaseInfo = new TestInfo(testCase);
}
/// <summary>
/// Verifies that Equality&lt;T&gt;.CreateComparer (and the selector overload of
/// Contains) match on the projected key only — differing Ids must not matter.
/// </summary>
public void CreateComparerTest()
{
    var itemA = new TestInfo { Id = Guid.NewGuid(), Name = "a" };
    var itemB = new TestInfo { Id = Guid.NewGuid(), Name = "b" };
    var list1 = new List<TestInfo> { itemA, itemB };

    // Same Name as itemA but a fresh Id.
    var info = new TestInfo { Id = Guid.NewGuid(), Name = "a" };

    var comparer = Equality<TestInfo>.CreateComparer(m => m.Name);
    Assert.IsTrue(list1.Contains(info, comparer));
    Assert.IsTrue(list1.Contains(info, r => r.Name));
}
/// <summary>
/// Stamps the given info object with the current timestamp and machine name
/// (data to be bound to a JBST) and returns it for chaining.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when info is null.</exception>
public TestInfo GetInfo(TestInfo info)
{
    if (info == null)
        throw new ArgumentNullException("info");

    info.TimeStamp = this.GetTimeStamp();
    info.Machine = this.GetMachine();

    return info;
}
/// <summary>
/// Initializes the page, wires the navigation helper's load/save state
/// handlers, disables page caching, and creates the initial test state.
/// </summary>
public TestPageWP()
{
    this.InitializeComponent();

    // Navigation helper persists/restores page state across navigations.
    this.navigationHelper = new NavigationHelper(this);
    this.navigationHelper.LoadState += navigationHelper_LoadState;
    this.navigationHelper.SaveState += navigationHelper_SaveState;
    this.NavigationCacheMode = Windows.UI.Xaml.Navigation.NavigationCacheMode.Disabled;

    // Initialize the main source: starting position 1, no result yet.
    hs = new TestInfo();
    hs.position = 1;
    hs.result = 0;
}
/// <summary>
/// Checks that instrumenting the named JUnit4 input file produces exactly
/// the expected output; on mismatch the generated code is written to the
/// output folder for inspection before the assertion failure propagates.
/// </summary>
public void Should_Insert_Measurement_Code_In_JUnit4_Code(string fileName)
{
    var inputRoot = Fixture.GetTestInputPath();
    var info = new TestInfo(inputRoot);
    var inPath = Path.Combine(inputRoot, fileName);
    var code = OccfCodeGenerator.GetIdentifiedTest(
        new FileInfo(inPath),
        info,
        LanguageSupports.GetCoverageModeByClassName("Java"));

    var expPath = Path.Combine(Fixture.GetTestExpectationPath(), fileName);
    using (var reader = new StreamReader(expPath))
    {
        var expected = reader.ReadToEnd();
        try
        {
            Assert.That(code, Is.EqualTo(expected));
        }
        catch
        {
            // Dump the actual output so the expectation can be diffed/updated.
            var path = Fixture.GetOutputPath(fileName);
            File.WriteAllText(path, code);
            throw;
        }
    }
}
/// <summary>
/// Asserts that a TestInfo constructed from the given ITest copies every
/// observable property, including the category list element-by-element.
/// </summary>
private void CheckConstructionFromTest( ITest expected )
{
    TestInfo actual = new TestInfo( expected );
    // TestName was previously asserted twice (once without a message);
    // a single assertion with a message is sufficient.
    Assert.AreEqual( expected.TestName, actual.TestName, "TestName" );
    Assert.AreEqual( expected.TestType, actual.TestType, "TestType" );
    Assert.AreEqual( expected.RunState, actual.RunState, "RunState" );
    Assert.AreEqual( expected.IsSuite, actual.IsSuite, "IsSuite" );
    Assert.AreEqual( expected.TestCount, actual.TestCount, "TestCount" );
    if ( expected.Categories == null )
        Assert.AreEqual( 0, actual.Categories.Count, "Categories" );
    else
    {
        Assert.AreEqual( expected.Categories.Count, actual.Categories.Count, "Categories" );
        for ( int index = 0; index < expected.Categories.Count; index++ )
            Assert.AreEqual( expected.Categories[index], actual.Categories[index], "Category {0}", index );
    }
}
/// <summary>
/// Shows (or lazily creates) the on-screen pointer marker and info label for
/// the given finger, then moves the marker to <paramref name="pos"/> and
/// updates the label text.
/// </summary>
void CreatePoint(int fingerID, Vector2 pos)
{
    TestInfo tesInfo;
    // TryGetValue avoids the ContainsKey + indexer double dictionary lookup.
    if (pointers.TryGetValue(fingerID, out tesInfo))
    {
        // Reuse the pooled objects for this finger.
        tesInfo.pointObj.gameObject.SetActive(true);
        tesInfo.infoObj.gameObject.SetActive(true);
    }
    else
    {
        // First touch for this finger: instantiate marker + label and pool them.
        Transform pointer = Instantiate<Transform>(pointImage);
        Text info = Instantiate<Text>(infoText);
        pointer.SetParent(canvas.transform);
        info.transform.SetParent(infoLayout.transform);
        tesInfo = new TestInfo(pointer, info);
        pointers.Add(fingerID, tesInfo);
    }
    tesInfo.pointObj.position = pos;
    tesInfo.infoObj.text = string.Format("ID:{0} Pos:{1}", fingerID, pos);
}
/// <summary>
/// Runs a LoadRunner scenario end to end: prepares the results folder,
/// runs the Controller scenario, generates the Analysis report, classifies
/// errors, and returns a populated TestRunResults.
/// <paramref name="errorReason"/> receives a description of any failure.
/// </summary>
public TestRunResults RunTest(TestInfo scenarioInf, ref string errorReason, RunCancelledDelegate runCancelled)
{
    string scenarioPath = scenarioInf.TestPath;
    //prepare the instance that will contain test results for JUnit
    TestRunResults runDesc = new TestRunResults();
    ConsoleWriter.ActiveTestRun = runDesc;
    ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running: " + scenarioPath);
    runDesc.TestType = TestType.LoadRunner.ToString();
    // Default to a temp dir; an explicit report path on the scenario overrides it.
    _resultsFolder = Helper.GetTempDir();
    if (scenarioInf.ReportPath != null && !scenarioInf.ReportPath.Equals(""))
    {
        _resultsFolder = scenarioInf.ReportPath;
    }
    //a directory with this name may already exist. try to delete it.
    if (Directory.Exists(_resultsFolder))
    {
        try
        {
            // Directory.Delete(_resultsFolder, true);
            // Empty the folder instead of deleting it outright.
            DirectoryInfo dir = new DirectoryInfo(_resultsFolder);
            dir.GetFiles().ToList().ForEach(file => file.Delete());
            dir.GetDirectories().ToList().ForEach(subdir => subdir.Delete());
        }
        catch (Exception)
        {
            // Cleanup is best-effort; report and continue with whatever is left.
            Console.WriteLine(string.Format(Resources.CannotDeleteReportFolder, _resultsFolder));
        }
    }
    else
    {
        try
        {
            Directory.CreateDirectory(_resultsFolder);
        }
        catch (Exception)
        {
            // Cannot create a results folder at all -> fail the run immediately.
            errorReason = string.Format(Resources.FailedToCreateTempDirError, _resultsFolder);
            runDesc.TestState = TestState.Error;
            runDesc.ErrorDesc = errorReason;
            Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed;
            return(runDesc);
        }
    }
    //create LRR folder:
    _controller_result_dir = Path.Combine(_resultsFolder, LRR_FOLDER);
    Directory.CreateDirectory(_controller_result_dir);
    //init result params
    runDesc.ErrorDesc = errorReason;
    runDesc.TestPath = scenarioPath;
    ConsoleWriter.WriteLine(runDesc.TestPath);
    runDesc.TestState = TestState.Unknown;
    if (!Helper.isLoadRunnerInstalled())
    {
        runDesc.TestState = TestState.Error;
        runDesc.ErrorDesc = string.Format(Resources.LoadRunnerNotInstalled, System.Environment.MachineName);
        ConsoleWriter.WriteErrLine(runDesc.ErrorDesc);
        Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed;
        return(runDesc);
    }
    //from here on, we may delegate runCancelled().
    _runCancelled = runCancelled;
    //start scenario stop watch
    Stopwatch scenarioStopWatch = Stopwatch.StartNew();
    //set state to running
    runDesc.TestState = TestState.Running;
    //and run the scenario
    bool res = runScenario(scenarioPath, ref errorReason, runCancelled);
    if (!res)
    {
        //runScenario failed. print the error and set test as failed
        ConsoleWriter.WriteErrLine(errorReason);
        runDesc.TestState = TestState.Error;
        runDesc.ErrorDesc = errorReason;
        runDesc.Runtime = scenarioStopWatch.Elapsed;
        //and try to close the controller
        closeController();
        return(runDesc);
    }
    else
    {
        try
        {
            ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator);
            runDesc.ReportLocation = _resultsFolder;
            ConsoleWriter.WriteLine(Resources.LrAnalysingResults);
            //close the controller, so Analysis can be opened
            ConsoleWriter.WriteLine("closing Controller");
            closeController();
            ConsoleWriter.WriteLine("Controller closed");
            //generate report using Analysis:
            ConsoleWriter.WriteLine("calling analysis report generator");
            generateAnalysisReport(runDesc);
            ConsoleWriter.WriteLine("analysis report generator finished");
            //check for errors:
            if (File.Exists(Path.Combine(_resultsFolder, "errors.xml")))
            {
                checkForErrors();
            }
            ConsoleWriter.WriteLine(Resources.LRErrorsSummary);
            //count how many ignorable errors and how many fatal errors occured.
            int ignore = getErrorsCount(ERRORState.Ignore);
            int fatal = getErrorsCount(ERRORState.Error);
            runDesc.FatalErrors = fatal;
            ConsoleWriter.WriteLine(String.Format(Resources.LrErrorSummeryNum, ignore, fatal));
            ConsoleWriter.WriteLine("");
            if (_errors != null && _errors.Count > 0)
            {
                foreach (ERRORState state in Enum.GetValues(typeof(ERRORState)))
                {
                    ConsoleWriter.WriteLine(printErrorSummary(state));
                }
            }
            //if scenario ended with fatal errors, change test state
            if (fatal > 0)
            {
                ConsoleWriter.WriteErrLine(string.Format(Resources.LRTestFailDueToFatalErrors, fatal));
                errorReason = buildErrorReasonForErrors();
                runDesc.TestState = TestState.Failed;
            }
            else if (ignore > 0)
            {
                // Only ignorable errors -> warning, not failure.
                ConsoleWriter.WriteLine(string.Format(Resources.LRTestWarningDueToIgnoredErrors, ignore));
                runDesc.HasWarnings = true;
                runDesc.TestState = TestState.Warning;
            }
            else
            {
                Console.WriteLine(Resources.LRTestPassed);
                runDesc.TestState = TestState.Passed;
            }
        }
        catch (Exception e)
        {
            // Analysis stage failed; record the error but still finish cleanly below.
            ConsoleWriter.WriteException(Resources.LRExceptionInAnalysisRunner, e);
            runDesc.TestState = TestState.Error;
            runDesc.ErrorDesc = Resources.LRExceptionInAnalysisRunner;
            runDesc.Runtime = scenarioStopWatch.Elapsed;
        }
        //runDesc.ReportLocation = _resultsFolder;
    }
    runDesc.Runtime = scenarioStopWatch.Elapsed;
    if (!string.IsNullOrEmpty(errorReason))
    {
        runDesc.ErrorDesc = errorReason;
    }
    KillController();
    return(runDesc);
}
/// <summary>
/// Builds a "fake pass" report for the given test: loads the embedded
/// Report.xml baseline from the test assembly, applies the stored failure
/// diffgram (test.Content) to it, then runs the patched report through the
/// FakePassTransform stylesheet. Returns the transformed XML as a string,
/// or null when no matching test or Report.xml resource exists.
/// </summary>
private string GetFakePassReport(TestInfo testInfo, ReportAssembly reportAssembly)
{
    Assembly assembly = reportAssembly.Assembly;
    XslCompiledTransform transform = FakePassTransform;
    XmlReaderSettings readerSettings = new XmlReaderSettings();
    // Embedded resources are named <namespace>.<class>.<test>.Report.xml
    string baseResourceName = String.Concat(testInfo.TestNamespace, ".", testInfo.TestClassName, ".", testInfo.TestName);
    foreach (TestClass testclass in reportAssembly.TestClasses)
    {
        if (testclass.Name == testInfo.TestClassName)
        {
            foreach (Test test in testclass.Tests)
            {
                if (test.Name == testInfo.TestName && testclass.TestNamespace == testInfo.TestNamespace)
                {
                    // Kept outside the usings so the finally block can dispose
                    // it even if a reader throws mid-pipeline.
                    MemoryStream patchedStream = null;
                    try
                    {
                        using (Stream stream = assembly.GetManifestResourceStream(baseResourceName + ".Report.xml"))
                        {
                            if (stream != null)
                            {
                                using (XmlReader reportReader = XmlReader.Create(stream, readerSettings))
                                {
                                    using (XmlReader diffReader = XmlReader.Create(new StringReader(test.Content), readerSettings))
                                    {
                                        // Apply the recorded diffgram to the baseline report.
                                        patchedStream = new MemoryStream();
                                        new XmlPatch().Patch(reportReader, new UncloseableStream(patchedStream), diffReader);
                                    }
                                }
                            }
                            else
                            {
                                // No baseline Report.xml resource for this test.
                                return null;
                            }
                        }
                        patchedStream.Position = 0;
                        using (XmlReader failReader = XmlReader.Create(patchedStream, readerSettings))
                        {
                            using (MemoryStream fakePassStream = new MemoryStream())
                            {
                                XmlWriterSettings writerSettings = new XmlWriterSettings();
                                writerSettings.Indent = true;
                                writerSettings.IndentChars = "\t";
                                writerSettings.CloseOutput = false;
                                using (XmlWriter fakePassWriter = XmlWriter.Create(fakePassStream, writerSettings))
                                {
                                    transform.Transform(failReader, fakePassWriter);
                                }
                                fakePassStream.Position = 0;
                                return new StreamReader(fakePassStream).ReadToEnd();
                            }
                        }
                    }
                    finally
                    {
                        if (patchedStream != null)
                        {
                            patchedStream.Dispose();
                        }
                    }
                }
            }
        }
    }
    // Test not found in this assembly's report data.
    return null;
}
/// <summary>
/// Return true if test has bugs associated.
/// </summary>
/// <param name="testInfo">TestInfo to inspect.</param>
/// <returns>true if test has at least one bug, false otherwise.</returns>
internal static bool TestHasBugs(TestInfo testInfo)
{
    var bugs = testInfo.Bugs;
    return bugs != null && bugs.Count > 0;
}
/// <summary>
/// Method for running tests in the background.
/// Builds a TestLaunchParam from the current parameters, decides whether a
/// feature-definition pass is needed, then executes the selected tests one
/// by one, honouring pause ("Resume"), "Stop" and "Halt" requests between
/// and during tests, and finally raises TestSuiteCompleted.
/// </summary>
void RunTests()
{
    // types description for test class constructor
    Type[] types = new Type[] { typeof(TestLaunchParam) };
    bool bCompletedNormally = false;
    // parameters as defined in TestEngine — copied field by field from _parameters.
    TestLaunchParam param = new TestLaunchParam();
    param.ServiceAddress = _parameters.Address;
    param.CameraIp = _parameters.CameraIP;
    param.CameraUUID = _parameters.CameraUUID;
    param.NIC = _parameters.NetworkInterfaceController;
    param.MessageTimeout = _parameters.MessageTimeout;
    param.RebootTimeout = _parameters.RebootTimeout;
    param.UserName = _parameters.UserName;
    param.Password = _parameters.Password;
    param.UseUTCTimestamp = _parameters.UseUTCTimestamp;
    param.Operator = _parameters.Operator;
    param.VideoForm = _parameters.VideoForm;
    param.EnvironmentSettings = _parameters.EnvironmentSettings;
    param.PTZNodeToken = _parameters.PTZNodeToken;
    param.Features.AddRange(_parameters.Features);
    param.UseEmbeddedPassword = _parameters.UseEmbeddedPassword;
    param.Password1 = _parameters.Password1;
    param.Password2 = _parameters.Password2;
    param.OperationDelay = _parameters.OperationDelay;
    param.RecoveryDelay = _parameters.RecoveryDelay;
    param.SecureMethod = _parameters.SecureMethod;
    param.SubscriptionTimeout = _parameters.SubscriptionTimeout;
    param.EventTopic = _parameters.EventTopic;
    param.TopicNamespaces = _parameters.TopicNamespaces;
    param.RelayOutputDelayTimeMonostable = _parameters.RelayOutputDelayTimeMonostable;
    param.RecordingToken = _parameters.RecordingToken;
    param.SearchTimeout = _parameters.SearchTimeout;
    param.AdvancedPrameters = _parameters.AdvancedPrameters;
    // parameters for constructor.
    object[] args = new object[] { param };
    int current = 0;
    List<TestInfo> processes = new List<TestInfo>();
    if (!_featuresDefined)
    {
        // Features not yet known: run the feature-definition pseudo-process first.
        processes.Add(FeaturesDefinitionProcess.This);
        System.Diagnostics.Debug.WriteLine("SECURITY: NONE");
        param.Security = Security.None;
    }
    else
    {
        // Pick the security mode from the detected feature set.
        if (_features.Contains(Feature.Digest))
        {
            System.Diagnostics.Debug.WriteLine("SECURITY: DIGEST");
            param.Security = Security.Digest;
        }
        else
        {
            System.Diagnostics.Debug.WriteLine("SECURITY: WS-USERNAME");
            param.Security = Security.WS;
        }
        //
        // we HAVE features => Controller is notified already about profile.
        // so, if we just run some tests - there is no reason to colour profiles tree
        // or select tests in test tree.
        //
        bool defineTests = _parameters.TestCases.Count == 0 && !_parameters.FeatureDefinition;
        if (defineTests)
        {
            List<Feature> features = new List<Feature>();
            features.AddRange(_features);
            features.AddRange(_undefinedFeatures);
            // define test cases via features detected
            List<TestInfo> tests = ConformanceLogicHandler.GetTestsByFeatures(_parameters.AllTestCases, features, _parameters.Conformance);
            // no tests in list, not a feature definition process - add tests to list
            _parameters.TestCases.AddRange(tests);
            // notify anyway (?)
            // - really, notify if not notified previously...
            ReportInitializationCompleted(tests, defineTests);
        }
    }
    if (_parameters.TestCases.Count > 0)
    {
        // A lone feature-definition entry is not a runnable test case.
        if (!(_parameters.TestCases.Count == 1 && _parameters.TestCases[0].ProcessType == ProcessType.FeatureDefinition))
        {
            processes.AddRange(_parameters.TestCases);
        }
    }
    // Go through the list of tests...
    // (indexed for-loop because processes may grow while iterating —
    // feature definition can append newly discovered tests below)
    for (int i = 0; i < processes.Count; i++)
    {
        TestInfo testInfo = processes[i];
        _currentTestInfo = testInfo;
        // "Stop" requested or "Halt" has not been handled at test level.
        if (_stop)
        {
            bCompletedNormally = false;
            break;
        }
        try
        {
            // Check if a pause should be done.
            // If a pause should be done at this point and "Halt" is clicked
            // during the pause = don't execute next test.
            bool dispatcherLevelPause;
            lock (_pauseSync)
            {
                dispatcherLevelPause = _dispatcherLevelPause;
            }
            if (dispatcherLevelPause)
            {
                // Sleep() returns TRUE if pause has been ended by clicking "Resume" and FALSE if "Halt"
                // button has been clicked.
                bool bContinue = Sleep();
                if (!bContinue)
                {
                    // Tests execution halted
                    bCompletedNormally = false;
                    break;
                }
            }
            // Report that a test is started
            if (TestStarted != null)
            {
                TestStarted(testInfo);
            }
            _currentTest = null;
            BaseTest test = InitCurrentTest(testInfo, types, args);
            // check if tests execution should be stopped
            bool halt;
            lock (_pauseSync)
            {
                halt = _delayedHalt;
            }
            if (halt)
            {
                bCompletedNormally = false;
                break;
            }
            // Re-check the pause flag after test construction.
            lock (_pauseSync)
            {
                dispatcherLevelPause = _dispatcherLevelPause;
            }
            if (dispatcherLevelPause)
            {
                // WAIT
                bool bContinue = Sleep();
                if (!bContinue)
                {
                    bCompletedNormally = false;
                    break;
                }
            }
            // start current test.
            // (really it means that _currentTest.EntryPoint method is executed synchronously)
            _currentTest.Start();
            current++;
            //
            // Feature definition process ended
            //
            if (testInfo.ProcessType == ProcessType.FeatureDefinition)
            {
                if (test.Halted)
                {
                    break;
                }
                else
                {
                    ProfilesSupportTest pst = new ProfilesSupportTest();
                    pst.ProfileDefinitionCompleted += new ProfilesSupportTest.ProfileDefinitionCompletedHandler(pst_ProfileDefinitionCompleted);
                    pst.CheckProfiles(_parameters.Profiles, _features, _scopes);
                    //
                    // if we run only one/several defined test (not Feature definition process, not 0 tests to run),
                    // we'll have to display profiles support
                    // And if profiles support have been displayed already (features defined), there is no need
                    // to notify TestController at all
                    //
                    bool defineTests = _parameters.TestCases.Count == 0 && !_parameters.FeatureDefinition;
                    if (!_featuresDefined || defineTests)
                    {
                        InitConformanceTesting(param);
                        _featuresDefined = true;
                        List<TestInfo> tests = ConformanceLogicHandler.GetTestsByFeatures(_parameters.AllTestCases, param.Features, _parameters.Conformance);
                        if (defineTests)
                        {
                            // define test cases via features detected
                            processes.AddRange(tests);
                        }
                        ReportInitializationCompleted(tests, defineTests);
                    }
                }
            }
            // pause between tests. If "Halt" is clicked during the pause - exit tests execution.
            if (current != processes.Count)
            {
                _currentTest = null;
                // Wait for either the halt event or the configured inter-test delay.
                int hndl = WaitHandle.WaitAny(new WaitHandle[] { _haltEvent }, _parameters.TimeBetweenTests);
                _haltEvent.Reset();
                lock (_pauseSync)
                {
                    halt = _delayedHalt;
                }
                if (hndl == 0 || halt)
                {
                    bCompletedNormally = false;
                    break;
                }
            }
            else
            {
                bCompletedNormally = true;
            }
        }
        catch (System.Reflection.TargetException exc)
        {
            // Reflection invocation failures: tear down the current test and report.
            _currentTest.ExitTest(exc);
            ReportException(exc);
            if (testInfo.ProcessType == ProcessType.FeatureDefinition)
            {
                break;
            }
        }
        catch (System.ArgumentException exc)
        {
            _currentTest.ExitTest(exc);
            ReportException(exc);
        }
        catch (System.Reflection.TargetParameterCountException exc)
        {
            _currentTest.ExitTest(exc);
            ReportException(exc);
        }
        catch (System.MethodAccessException exc)
        {
            _currentTest.ExitTest(exc);
            ReportException(exc);
        }
        catch (System.InvalidOperationException exc)
        {
            _currentTest.ExitTest(exc);
            ReportException(exc);
        }
        catch (Exception exc)
        {
            // Unknown failure: report only; _currentTest may not be valid here.
            ReportException(exc);
        }
    }
    // all tests are performed.
    _state = TestState.Idle;
    if (TestSuiteCompleted != null && !_shutDownInProgress)
    {
        TestSuiteCompleted(_parameters, bCompletedNormally);
    }
}
/// <summary>
/// Handles selection in tests tree.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void tvTestCases_AfterSelect(object sender, TreeViewEventArgs e)
{
    // !!! Check that this not a group! Expanding group is not a test click!
    // Running :
    // if scrolling is disabled:
    // - current test: __enable scrolling___
    // - completed test : show results
    // - not started test : ___enable scrolling___ and switch to current (?)
    // if scrolling is enabled:
    // - completed test: stop scrolling and show results
    // - current test : do nothing
    // - not started test : stop scrolling, clear results
    // Idle :
    // - show results if exist;
    // - allow "Run Current" button
    TreeNode node = tvTestCases.SelectedNode;
    if (node != null)
    {
        if (node.Tag != null)
        {
            // Node carries a test (not a group).
            TestInfo testInfo = (TestInfo)node.Tag;
            bool showResults = true;
            if (_controller.Running)
            {
                System.Diagnostics.Debug.WriteLine("Controller running");
                if (_controller.ScrollingEnabled)
                {
                    // if not current, stop scrolling
                    // current result will be displayed when scrolling is started
                    if (_controller.CurrentTest != testInfo)
                    {
                        System.Diagnostics.Debug.WriteLine("Disable scrolling");
                        EnableScrolling(false);
                        tcTestResults.EnableSearch();
                        currentTestSelected = false;
                    }
                }
                else
                {
                    // if current test - enable scrolling
                    if (_controller.CurrentTest == testInfo)
                    {
                        System.Diagnostics.Debug.WriteLine("Enable scrolling");
                        EnableScrolling(true);
                        showResults = false;
                        tcTestResults.DisableSearch();
                        currentTestSelected = true;
                    }
                }
                // NOTE(review): this unconditionally overwrites the branch
                // assignments above, re-deriving showResults from the (possibly
                // just-changed) ScrollingEnabled state — confirm this is the
                // intended final word on whether results are displayed.
                showResults = !_controller.ScrollingEnabled;
            }
            if (showResults)
            {
                ClearTestInfo();
                TestResult log = _controller.GetTestResult(testInfo);
                if (log != null)
                {
                    DisplayTestResults(log);
                }
            }
        }
        else
        {
            // Group node: only clear the panel when not actively scrolling.
            if (!(_controller.Running && _controller.ScrollingEnabled))
            {
                ClearTestInfo();
            }
        }
        tvProfiles.SetInactive();
        featuresTree.SetInactive();
        tsbRunCurrent.Enabled = !_controller.Running;
    }
}
/// <summary>
/// Delegates the common event checks to the two-argument overload, then
/// verifies the first captured event carries the expected test name.
/// </summary>
private void CheckEvent(TestAction action, string fileName, TestInfo test)
{
    CheckEvent(action, fileName);
    TestEventArgs firstEvent = (TestEventArgs)catcher.Events[0];
    Assert.AreEqual(TESTNAME, firstEvent.Test.TestName.Name);
}
/// <summary>
/// Round-trip network test for TUP encoding: connects to a local echo
/// server, encodes a packet of primitives plus a TestInfo struct, sends it,
/// receives the reply and decodes/prints every field.
/// </summary>
public void testbyNetWork()
{
    try
    {
        tcpClient client = new tcpClient();
        // create the socket
        client.createSocket();
        // connect to the server
        client.connect("127.0.0.1", 8080);
        TestInfo tiSend = new TestInfo();
        assignTestInfo(ref tiSend);
        //-------------------------encode-----------------------
        UniPacket encodePack = new UniPacket();
        // set the TUP version number; the default is PACKET_TYPE_TUP
        encodePack.ServantName = "TestInfo";
        encodePack.FuncName = "TestInfo";
        short tests = -100;
        int testi = int.MaxValue;
        long testl = -1000;
        float testf = -100.032f;
        string teststr = "teststring";
        encodePack.Put("tests", tests);
        encodePack.Put("testi", testi);
        encodePack.Put("testl", testl);
        encodePack.Put("testf", testf);
        encodePack.Put("teststr", teststr);
        encodePack.Put<TestInfo>("TestInfo", tiSend);
        byte[] bufferS = encodePack.Encode();
        Console.WriteLine("打印发送 buffer");
        int ret = client.send(bufferS);
        StringBuilder sb = new StringBuilder();
        tiSend.Display(sb, 0);
        Console.WriteLine(sb.ToString());
        Console.WriteLine("发送大小: " + ret);
        byte[] bufferR = new byte[8192];
        ret = client.receive(bufferR);
        Console.WriteLine("打印接收 buffer");
        Console.WriteLine("接收大小:" + ret);
        //----------------------decode----------------------------
        // Anything not longer than the 4-byte length prefix is not a valid reply.
        if (ret > sizeof(int))
        {
            UniPacket decodePack = new UniPacket();
            decodePack.Decode(bufferR);
            TestInfo tiRecv = new TestInfo();
            short dtests = 0;
            int dtesti = 0;
            long dtestl = 0;
            float dtestf = 0.0f;
            string dteststr = "";
            // use the new Get<T>(string Name, T DefaultObj) overload
            dtests = decodePack.Get<short>("tests", dtests);
            dtesti = decodePack.Get<int>("testi", dtesti);
            dtestl = decodePack.Get<long>("testl", dtestl);
            dtestf = decodePack.Get<float>("testf", dtestf);
            dteststr = decodePack.Get<string>("teststr", dteststr);
            tiRecv = decodePack.Get<TestInfo>("TestInfo", tiRecv);
            sb.Clear();
            tiRecv.Display(sb, 0);
            Console.WriteLine("end:" + sb.ToString());
            Console.WriteLine("dtests:" + dtests);
            Console.WriteLine("dtesti:" + dtesti);
            Console.WriteLine("dtestl:" + dtestl);
            Console.WriteLine("dtestf:" + dtestf);
            Console.WriteLine("dteststr:" + dteststr);
            Console.ReadLine();
        }
        client.close();
    }
    catch (Exception e)
    {
        Console.WriteLine("testbyNetWork: " + e.Message);
    }
}
/// <summary>
/// Construct a TestNode given a test, labelling the node with the test's
/// name and selecting the matching tree image.
/// </summary>
public TestSuiteTreeNode(TestInfo test) : base(test.TestName.Name)
{
    this.test = test;
    UpdateImageIndex();
}
/// <summary>
/// Finds the report assembly matching the test's suite name and assembly
/// location. Returns an empty ReportAssembly (and fails a debug assert)
/// when no match exists.
/// </summary>
private ReportAssembly GetReportAssembly(TestInfo testInfo)
{
    // Return on first match instead of scanning every suite/assembly and
    // keeping the last hit, as the original loop did.
    foreach (ReportSuite reportSuite in myReportSuites)
    {
        if (reportSuite.Name != testInfo.SuiteName)
        {
            continue;
        }
        foreach (ReportAssembly assembly in reportSuite.Assemblies)
        {
            if (assembly.Location == testInfo.AssemblyLocation)
            {
                return assembly;
            }
        }
    }
    Debug.Assert(false, "Assembly failed to load. ");
    return new ReportAssembly();
}
/// <summary>
/// Reconstructs the expected report for the test by patching the embedded
/// Report.xml baseline with the test's stored diffgram, then delegates to
/// the XmlReader overload to locate a missing compare with the given
/// extension. Returns "Report File Is Missing." when the baseline resource
/// does not exist.
/// </summary>
private string FindMissingCompare(TestInfo testInfo, Assembly assembly, string compareExtension)
{
    // Embedded resources are named <namespace>.<class>.<test>.Report.xml
    string baseResourceName = String.Concat(testInfo.TestNamespace, ".", testInfo.TestClassName, ".", testInfo.TestName);
    StringReader diffReader = new StringReader(testInfo.Content);
    using (MemoryStream patchedStream = new MemoryStream())
    {
        using (Stream reportStream = assembly.GetManifestResourceStream(string.Format("{0}.Report.xml", baseResourceName)))
        {
            if (reportStream == null)
            {
                return "Report File Is Missing.";
            }
            using (XmlReader reportReader = XmlReader.Create(reportStream))
            {
                using (XmlReader diffreader = XmlReader.Create(diffReader))
                {
                    // Apply the stored diffgram to the baseline report.
                    new XmlPatch().Patch(reportReader, patchedStream, diffreader);
                }
            }
        }
        // Rewind and let the reader-based overload inspect the patched report.
        patchedStream.Position = 0;
        using (XmlReader patchedReader = XmlReader.Create(patchedStream))
        {
            return FindMissingCompare(patchedReader, compareExtension);
        }
    }
}
/// <summary>
/// Queues the next trial: draws one random byte from the generator and uses
/// its low bit to pick A or B, then appends the new TestInfo to the list.
/// </summary>
private void PrepareNextTest()
{
    byte[] buffer = new byte[1];
    gen.GetBytes(buffer);
    AB choice = ((buffer[0] & 1) == 0) ? AB.A : AB.B;
    m_testInfoList.Add(new TestInfo(choice));
}
/// <summary>
/// Drives a multi-threaded throughput benchmark: constructs one test
/// instance per thread, runs a warm-up window, then a measured window,
/// waits for all workers to drain, and logs the requests-per-second figure
/// (or the first exception captured by a worker).
/// </summary>
public override void Run()
{
    try
    {
        Stopwatch timer = new Stopwatch();
        // Durations are configured in seconds; convert to stopwatch ticks.
        long warmupDuration = (long)_attr.WarmupDuration * Stopwatch.Frequency;
        long testDuration = (long)_attr.TestDuration * Stopwatch.Frequency;
        int threads = _attr.Threads;
        TestInfo[] info = new TestInfo[threads];
        ConstructorInfo targetConstructor = _type.GetConstructor(Type.EmptyTypes);
        for (int i = 0; i < threads; i++)
        {
            info[i] = new TestInfo();
            info[i]._instance = targetConstructor.Invoke(null);
            info[i]._delegateTest = CreateTestMethodDelegate();
            ExecuteSetupPhase(info[i]._instance);
        }
        _firstException = null;
        _continue = true;
        _rps = 0;
        for (int i = 0; i < threads; i++)
        {
            Interlocked.Increment(ref _threadsRunning);
            ThreadPool.QueueUserWorkItem(_waitCallback, info[i]);
        }
        // Warm-up window: let the workers run, then discard their counts.
        timer.Reset();
        timer.Start();
        while (timer.ElapsedTicks < warmupDuration)
        {
            Thread.Sleep(1000);
        }
        // Reset the counter; the warm-up total itself is not used.
        Interlocked.Exchange(ref _rps, 0);
        // Measured window.
        timer.Reset();
        timer.Start();
        TestMetrics.StartCollection();
        while (timer.ElapsedTicks < testDuration)
        {
            Thread.Sleep(1000);
        }
        int requests = Interlocked.Exchange(ref _rps, 0);
        // BUGFIX: ElapsedTicks and Frequency are both long, so the original
        // expression performed integer division (truncating to whole
        // seconds) before the implicit double conversion, skewing the RPS.
        double elapsedSeconds = (double)timer.ElapsedTicks / Stopwatch.Frequency;
        TestMetrics.StopCollection();
        // Signal workers to stop and wait for them all to exit.
        _continue = false;
        while (_threadsRunning > 0)
        {
            Thread.Sleep(1000);
        }
        for (int i = 0; i < threads; i++)
        {
            ExecuteCleanupPhase(info[i]._instance);
        }
        double rps = (double)requests / elapsedSeconds;
        if (_firstException == null)
        {
            LogTest(rps);
        }
        else
        {
            LogTestFailure(_firstException.ToString());
        }
    }
    catch (TargetInvocationException e)
    {
        // Reflection wraps the real failure; log the inner exception.
        LogTestFailure(e.InnerException.ToString());
    }
    catch (Exception e)
    {
        LogTestFailure(e.ToString());
    }
}
//these functions use the current FileInfo object to create
//temporary files. The testInfo object must be set before they are called.
/// <summary>
/// Produces the "actual" compare document for a failed test. When the test
/// result is a report diffgram (or a missing-report payload was supplied),
/// the recorded diffgram is applied to the baseline report, the embedded
/// compare section is extracted by extension, applied to the original
/// compare resource, and the result is pretty-printed and returned.
/// Returns null when no matching test, report or compare resource exists.
/// </summary>
private string GetActualCompare(TestInfo testInfo, ReportAssembly reportAssembly, string missingReport, string extension)
{
    Assembly assembly = reportAssembly.Assembly;
    foreach (TestClass testclass in reportAssembly.TestClasses)
    {
        if (testclass.Name == testInfo.TestClassName)
        {
            foreach (Test test in testclass.Tests)
            {
                if (test.Name == testInfo.TestName && testclass.TestNamespace == testInfo.TestNamespace)
                {
                    string result = test.Result;
                    string baseResourceName = testInfo.BaseFileName;
                    XmlReaderSettings readerSettings = new XmlReaderSettings();
                    if (result == ReportDiffgram || missingReport != null)
                    {
                        // When missingReport is provided, it replaces the
                        // embedded Report.xml resource as the report source.
                        using (Stream reportStream = missingReport != null ? null : assembly.GetManifestResourceStream(baseResourceName + ".Report.xml"))
                        {
                            if (reportStream != null || missingReport != null)
                            {
                                XmlPatch patcher = new XmlPatch();
                                using (XmlReader reportReader = missingReport != null ? XmlReader.Create(new StringReader(missingReport)) : XmlReader.Create(reportStream, readerSettings))
                                {
                                    StringReader compareStringReader;
                                    if (missingReport != null)
                                    {
                                        // Report text was supplied directly; pull the compare out of it as-is.
                                        compareStringReader = ExtractStringReaderFromCompareByName(reportReader, extension);
                                    }
                                    else
                                    {
                                        using (XmlReader diffReader = XmlReader.Create(new StringReader(test.Content), readerSettings))
                                        {
                                            using (MemoryStream compareStream = new MemoryStream())
                                            {
                                                patcher.Patch(reportReader, new UncloseableStream(compareStream), diffReader);
                                                compareStream.Position = 0;
                                                //the filestream is now the expected test report, containing a compare.
                                                //Now to get that compare...
                                                compareStringReader = ExtractStringReaderFromCompareByName(XmlReader.Create(compareStream), extension);
                                            }
                                        }
                                    }
                                    //and apply the diffgram within in to the original Compare.orm
                                    using (Stream compareStream = assembly.GetManifestResourceStream(string.Format("{0}.{1}", baseResourceName, extension)))
                                    {
                                        if (compareStream != null)
                                        {
                                            using (XmlReader compareReader = XmlReader.Create(compareStream, readerSettings))
                                            {
                                                using (MemoryStream patchedCompareStream = new MemoryStream())
                                                {
                                                    using (XmlReader reportCompareReader = XmlReader.Create(compareStringReader, readerSettings))
                                                    {
                                                        //an exception thrown here is generally caused by the report suite being outdated.
                                                        patcher.Patch(compareReader, new UncloseableStream(patchedCompareStream), reportCompareReader);
                                                    }
                                                    patchedCompareStream.Position = 0;
                                                    using (MemoryStream patchedOutputStream = new MemoryStream())
                                                    {
                                                        using (XmlReader reader = XmlReader.Create(patchedCompareStream))
                                                        {
                                                            // Pretty-print the patched compare before returning it.
                                                            XmlWriterSettings writerSettings = new XmlWriterSettings();
                                                            writerSettings.Indent = true;
                                                            writerSettings.IndentChars = "\t";
                                                            writerSettings.CloseOutput = false;
                                                            using (XmlWriter writer = XmlWriter.Create(patchedOutputStream, writerSettings))
                                                            {
                                                                FormatXml(reader, writer);
                                                            }
                                                        }
                                                        patchedOutputStream.Position = 0;
                                                        return new StreamReader(patchedOutputStream).ReadToEnd();
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    // Matching test found, but no compare could be produced.
                    return null;
                }
            }
        }
    }
    Debug.Fail("Internal Error. function: GetActualCompare.");
    return null;
}
/// <summary>
/// Post-processes collected measurements according to myTestInfo.TestLabel.
/// For "FinalizeLeakAndFactor" it logs each sample, averages the leak-rate,
/// temperature-factor and temperature lists, computes the normalized leak
/// rate at the configured reference temperature, and writes the rounded
/// values into myTestInfo.ResultsParams. Other labels are currently no-ops.
/// (A no-op try { } catch (Exception) { throw; } wrapper was removed —
/// a bare rethrow adds nothing over letting the exception propagate.)
/// </summary>
public void Execute(ref TestInfo myTestInfo, ref UUTData myUUTData, ref CommonData myCommonData, List <double> listOfLeakRate, List <double> listOfTempFactor, List <double> listOfTemp)
{
    // Slope and reference temperature come from the test's parameter table.
    var slope = Convert.ToDouble(myTestInfo.TestParams[1].Value);
    var refTemp = Convert.ToDouble(myTestInfo.TestParams[2].Value);
    switch (myTestInfo.TestLabel)
    {
        case "FinalizeLeakAndFactor":
        {
            // Log every leak-rate sample, then average.
            // (.Count property replaces the LINQ .Count() calls — same value, no enumerator.)
            Trace.WriteLine(string.Format("Total {0} measurements for DUT Leak Rate:-", listOfLeakRate.Count));
            for (int i = 0; i < listOfLeakRate.Count; i++)
            {
                Trace.WriteLine(String.Format(" Leak Rate #{0} = {1} std cc/s", i + 1, listOfLeakRate[i]));
            }
            var averageLeak = listOfLeakRate.Average();
            Trace.WriteLine("Average Leak Rate = " + averageLeak.ToString());
            var averageLeakSci = Math.Round(averageLeak, 9);
            Trace.WriteLine("Average Leak Rate = " + averageLeakSci.ToString());

            // Temperature-factor samples.
            Trace.WriteLine(string.Format("Total {0} measurements for DUT Temperature Factor:-", listOfTempFactor.Count));
            for (int i = 0; i < listOfTempFactor.Count; i++)
            {
                Trace.WriteLine(String.Format(" Temp Factor #{0} = {1} deg C", i + 1, listOfTempFactor[i]));
            }
            var averageFactor = listOfTempFactor.Average();
            Trace.WriteLine("Average Temp Factor = " + averageFactor.ToString());
            var averageFactorSci = Math.Round(averageFactor, 2);
            Trace.WriteLine("Average Temp Factor = " + averageFactorSci.ToString());

            // Temperature samples.
            Trace.WriteLine(string.Format("Total {0} measurements for DUT Temperature :-", listOfTemp.Count));
            for (int i = 0; i < listOfTemp.Count; i++)
            {
                Trace.WriteLine(String.Format(" Temperature #{0} = {1} deg C", i + 1, listOfTemp[i]));
            }
            var averageTemp = listOfTemp.Average();
            Trace.WriteLine("Average Temp = " + averageTemp.ToString());
            var averageTempSci = Math.Round(averageTemp, 2);
            Trace.WriteLine("Average Temp = " + averageTempSci.ToString());

            Trace.WriteLine(string.Format("Calculating DUT Normalized Leak Rate at {0} Deg Celcius reference temperature", refTemp));
            // Normalization deliberately uses the UNROUNDED averages.
            var normLeakRateDouble = TestHelper.CalculateNormLeakRate(slope, refTemp, averageLeak, averageTemp);
            Trace.WriteLine("Normalized Leak Rate = " + normLeakRateDouble.ToString());
            myTestInfo.ResultsParams[1].Result = averageLeakSci.ToString();
            // NOTE(review): the two values below are rounded a second time
            // (2 dp above, then 1 dp here) — confirm the double rounding is intended.
            myTestInfo.ResultsParams[2].Result = Math.Round(averageFactorSci, 1).ToString();
            myTestInfo.ResultsParams[3].Result = Math.Round(averageTempSci, 1).ToString();
            myTestInfo.ResultsParams[4].Result = Math.Round(normLeakRateDouble, 9).ToString();
        }
        break;
        case "SubmitSerialNumber":
        {
        }
        break;
        case "SubmitTestData":
        {
        }
        break;
        default:
            break;
    }
}
/// <summary>
/// Executes one step of manufacturing test sequence 5.4 against the leak detector,
/// selected by MyTestInfo.TestLabel. Commands are sent over the serial link via
/// Helper.DoThis / Helper.SendCommand and each test point's outcome is written into
/// MyTestInfo.ResultsParams[step] ("ok", a measured value, or "FAILED").
/// NOTE(review): relies on class-level members not visible in this chunk (comPort,
/// iteSlot, Pressure, System_pressure, Speed, SQL_XGS) — confirm their semantics.
/// </summary>
/// <param name="myLD">Open connection to the leak detector under test.</param>
/// <param name="MyTestInfo">Current test definition; results are written into its ResultsParams.</param>
/// <param name="myuutdata">Unit-under-test data; Options holds the slot number.</param>
/// <returns>MyTestInfo with this step's results recorded.</returns>
public static TestInfo DoSeq5_4(VSLeakDetector myLD, ref TestInfo MyTestInfo, ref UUTData myuutdata)
{
    //VSLeakDetector myLD = new VSLeakDetector(comPort);
    //myLD.iteSlot = iteSlot;
    string retval;
    Boolean status = false;
    int step = 1;               // 1-based index into ResultsParams
    Helper.comPort = comPort;
    try
    {
        switch (MyTestInfo.TestLabel)
        {
            case "5.4.1 uut_config":
            {
                step = 1;
                while (step <= MyTestInfo.ResultsParams.NumResultParams)
                {
                    //@@ Enable Auto ranging mode @@//
                    Trace.WriteLine(iteSlot + "Enable auto ranging mode...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "AUTO", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ Turns the auto sequencer function OFF @@//
                    Trace.WriteLine(iteSlot + "Disable auto sequencer function...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "DISABLE-SEQUENCER", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ Set test pressure to 1.0 Torr @@//
                    Trace.WriteLine(iteSlot + "Set test pressure to unity");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "10.0E-0 INIT-CL-XFER", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    /*@@ An integer that scales the leak rate to account for deviations in helium compression ratios
                     * between contraflow and midstage modes in fast turbo speed @@*/
                    Trace.WriteLine(iteSlot + "Scales the leak rate to account for deviations in helium compression ratios betwen Test & FineTest modes in fast turbo speed...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "90 INIT-FAST-VGAIN", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ Sets which type of calibration will be run if the CALIBRATE command is initiated.
                    //   Preceded by 0 or 1, 0 = FULL cal, 1 = FAST cal @@//
                    Trace.WriteLine(iteSlot + "Run full calibration...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "0 INIT-QUICK-CAL", "ok", step, "0");
                    step++;
                    Thread.Sleep(2000);
                    //@@ GETeeTCs @@//
                    Trace.WriteLine(iteSlot + "Send command: GETeeTCs...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "GETeeTCs", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ ON Softstart @@//
                    Trace.WriteLine(iteSlot + "Informs turbo to start with softstart ON...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "SOFTSTARTON", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ Prompt the operator to prepare to listen for the valves actuating @@//
                    Trace.WriteLine(iteSlot + "Ready to listen for the valves actuating...");
                    string display = "Seq 5.4.1: \nPlease prepare to listen for the valve actuating.\n\nEnter 'ok' to proceed the test once ready.";
                    retval = Helper.LabelDisplay(display);
                    if (retval == "ok")
                    {
                        MyTestInfo.ResultsParams[step].Result = "ok";
                        Trace.WriteLine(iteSlot + "Test point complete.");
                    }
                    else
                    {
                        MyTestInfo.ResultsParams[step].Result = "FAILED";
                        Trace.WriteLine(iteSlot + "Test point failed.");
                    }
                    step++;
                    Thread.Sleep(2000);
                    //@@ Open the vent valve and a sound shall be heard when changing state @@//
                    Trace.WriteLine(iteSlot + "Opening vent valve...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "VENT_VALVE OPEN", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ Verify audible run prompt @@//
                    Trace.WriteLine(iteSlot + "Verify whether the actuation is heard during the opening of the vent valve...");
                    display = "Seq 5.4.1: \nEnter 'ok' if the actuation is heard when the valve is opened.\n\nEnter 'no' if nothing is heard after the valve has opened.";
                    retval = Helper.LabelDisplay(display);
                    if (retval == "ok")
                    {
                        MyTestInfo.ResultsParams[step].Result = "ok";
                        Trace.WriteLine(iteSlot + "Test point complete.");
                    }
                    else
                    {
                        MyTestInfo.ResultsParams[step].Result = "FAILED";
                        //throw new Exception("Test point failed.");
                    }
                    step++;
                    Thread.Sleep(2000);
                }
                break;
            }
            case "5.4.2 TestPortConvectorr_ATM":
            case "5.4.14 TestPortConvectorr_ATM":
            {
                step = 1;
                // wait for system ready first
                Trace.WriteLine(iteSlot + "Waiting for system ready...");
                var sysReady = Helper.Wait_SystemReady(myLD);
                Trace.WriteLine(iteSlot + "... ready!");
                Thread.Sleep(2000); // delay two seconds
                //while (step <= MyTestInfo.ResultsParams.NumResultParams)
                //{
                //@@ Sets the current test port thermocouple reading to represent atmospheric pressure when the test port is exposed to atmosphere @@//
                Trace.WriteLine(iteSlot + "Sets the current test port thermocouple reading to ATM when exposing to atmosphere...");
                status = Helper.DoThis(myLD, ref MyTestInfo, "TPTCATM", "ok", step, "ok");
                step++;
                Thread.Sleep(2000);
                /*@@ Obtain pressure level. Reports two lines. each begins with a <cr><lf>.
                 * The first line consists of the words test port TC followed by a number in mTorr.
                 * The second line consists of the words system TC followed by a number in uTorr. @@*/
                Trace.WriteLine(iteSlot + "Obtain and verify the TP and system pressure level...");
                retval = Helper.SendCommand(myLD, ref status, "?PRESSURES", "ok");
                if (status == true)
                {
                    //Obtain test port pressure level
                    string[] response = retval.Split(new string[] { "(mTorr): ", "\r\nSpectrometer" }, StringSplitOptions.RemoveEmptyEntries);
                    //obtain system pressure level
                    string[] response2 = retval.Split(new string[] { "Spectrometer (uTorr): ", " ok\r\n" }, StringSplitOptions.RemoveEmptyEntries);
                    Pressure = Convert.ToInt32(response[1]);
                    System_pressure = Convert.ToInt32(response2[1]);
                    Trace.WriteLine(iteSlot + "Pressure: " + Pressure + "mTorr System Pressure: " + System_pressure + "uTorr");
                    // commented out below manual limit checking, use test executive to do the limit checking and display the result correctly
                    MyTestInfo.ResultsParams[2].Result = Pressure.ToString();
                    // NOTE(review): lowercase 'system_pressure' here vs 'System_pressure' assigned above —
                    // these must be two distinct class members for this to compile; confirm which is intended.
                    MyTestInfo.ResultsParams[3].Result = system_pressure.ToString();
                    //if (Pressure >= 700000 && Pressure <= 760000 && System_pressure < 2)
                    //{
                    //    MyTestInfo.ResultsParams[step].Result = "ok";
                    //    Trace.WriteLine(iteSlot + "Test point complete.");
                    //}
                    //else
                    //{
                    //    MyTestInfo.ResultsParams[step].Result = "FAILED";
                    //    //throw new Exception("Test point failed.");
                    //}
                }
                else
                {
                    MyTestInfo.ResultsParams[step].Result = "FAILED";
                    throw new Exception("Test point failed.");
                }
                step++;
                Thread.Sleep(5000);
                //}
            }
            break;
            case "5.4.7 turbo_pump Init":
            {
                step = 1;
                while (step <= MyTestInfo.ResultsParams.NumResultParams)
                {
                    //@@ Access full command @@//
                    Trace.WriteLine(iteSlot + "Access to full command...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "XYZZY", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ Verify the state of softstart @@//
                    Trace.WriteLine(iteSlot + "Verify the status of softstart...");
                    //status = Helper.DoThis(ref MyTestInfo, "?SOFTSTART", "on ok", step, "ok");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "?SOFTSTART", new string[] { "on", "ok" }, step, "ok"); // Hairus changed to check array contains 26 Jul 17
                    Thread.Sleep(6000);
                    step++;
                    //(MOD: It is necessary to wait for the UUT to fully stabilized before roughing the system)
                    Trace.WriteLine(iteSlot + "Wait for system to get ready...");
                    Helper.Wait_SystemReady(myLD);
                    //@@ Causes the leak detector to begin roughing on the test port only @@//
                    Trace.WriteLine(iteSlot + "Rough the UUT...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "ROUGH", "ok", step, "ok");
                    step++;
                    Thread.Sleep(2000);
                    //@@ Obtain the details about the state of the turbo pump @@//
                    Trace.WriteLine(iteSlot + "Verify the status and condition of the turbo pump...");
                    retval = Helper.SendCommand(myLD, ref status, "?TURBO", "Turbo Ready", "Turbo No Fault");
                    if (status == true)
                    {
                        Trace.WriteLine(iteSlot + "Test point complete.");
                        MyTestInfo.ResultsParams[step].Result = "ok";
                        //string[] response = retval.Split(new string[] { "(RPM): ", " \r\nTurbo Temp" }, StringSplitOptions.RemoveEmptyEntries);
                        //Speed = Convert.ToInt32(response[1]);
                        // Pull the "(RPM)" line out of the multi-line ?TURBO reply and parse the value after ':'.
                        string[] newResponse = retval.Split(new string[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
                        string turboSpeedResponse = newResponse.SingleOrDefault(x => x.Contains("(RPM)"));
                        string speedText = turboSpeedResponse.Split(':').LastOrDefault();
                        Speed = Convert.ToInt32(speedText);
                    }
                    else
                    {
                        MyTestInfo.ResultsParams[step].Result = "FAILED";
                        throw new Exception("Test point failed.");
                    }
                    step++;
                    //@@ Retrieve and verify the turbo pump speed in RPM @@//
                    Trace.WriteLine(iteSlot + "Verify the measured speed of the turbo pump in RPM...(Measured speed = " + Speed + ")");
                    if (Speed >= Convert.ToInt32(MyTestInfo.ResultsParams[step].SpecMin) && Speed <= Convert.ToInt32(MyTestInfo.ResultsParams[step].SpecMax))
                    {
                        MyTestInfo.ResultsParams[step].Result = Convert.ToString(Speed);
                        MyTestInfo.ResultsParams[step].Nominal = Convert.ToString(Speed);
                        Trace.WriteLine(iteSlot + "Test point complete.");
                    }
                    else
                    {
                        MyTestInfo.ResultsParams[step].Result = "FAILED";
                        throw new Exception("Test point failed.");
                    }
                    step++;
                    Thread.Sleep(2000);
                }
                break;
            }
            case "5.4.10 Valve State":
            {
                step = 1;
                while (step <= MyTestInfo.ResultsParams.NumResultParams)
                {
                    //@@ Prompt user to press ZERO if unit is stuck in CONTRAFLOW @@//
                    Trace.WriteLine(iteSlot + "Auto 'ZERO' if unit is stuck in TEST mode...");
                    status = Helper.Stuck_Contra(myLD);
                    if (status == true)
                    {
                        MyTestInfo.ResultsParams[step].Result = "ok";
                        Trace.WriteLine(iteSlot + "Test point complete.");
                    }
                    step++;
                    //@@ Turn softstart OFF @@//
                    Trace.WriteLine(iteSlot + "Informs turbo to start with softstart OFF...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "SOFTSTARTOFF", "ok", step, "ok");
                    step++;
                    Thread.Sleep(30000);
                    //@@ Verify Valve State @@//
                    Trace.WriteLine(iteSlot + "Verify the valve state...");
                    status = Helper.DoThis(myLD, ref MyTestInfo, "?VALVESTATE", "MIDSTAGE", step, "ok");
                    step++;
                }
                break;
            }
            case "5.4.11 IMG100_Test":
            {
            recheck:
                // Poll until the shared XGS resource is released by other slots.
                string XGSstatus = SQL_XGS.Read_XGS_status();
                if (XGSstatus != "No")
                {
                    MessageBox.Show("XGS locked", "warning", MessageBoxButtons.OK);
                    Thread.Sleep(5000);
                    goto recheck;
                }
                step = 1;
                string slotNumber = myuutdata.Options;
                string imgCode = "IMG" + slotNumber;
                string cnvCode = "GATE" + slotNumber;
                string comm = "COM6";
                //@@ IMG 100 Test, to ensure the pressure contained inside the UUT is below 5E-4 Torr @@//
                // DB close IMG 100 YS WONG to add in
                SQL_XGS.Update_XGS(slotNumber, "Yes"); // take the XGS lock for this slot
                WaitIMG100HighVacuumTest my_waitIMG100 = new WaitIMG100HighVacuumTest();
                var pressure_reading = my_waitIMG100.GetHighVacuumReading(comm, "00", imgCode, cnvCode);
                MyTestInfo.ResultsParams[step].Result = pressure_reading.ToString();
                break;
            }
            case "5.4.12 TestPortConvectorr_Zero":
            {
                step = 1;
                //while (step <= MyTestInfo.ResultsParams.NumResultParams)
                //{
                //@@ Calibrate the test port convectorr gauge to 'zero' @@//
                Trace.WriteLine(iteSlot + "Calibrate the test port convectorr gauge to ZERO...");
                status = Helper.DoThis(myLD, ref MyTestInfo, "TPTCZERO", "ok", step, "ok");
                step++;
                //@@ Obtain system and test port pressure level @@//
                Trace.WriteLine(iteSlot + "Obtain and verify the TP and system pressure level...");
                retval = Helper.SendCommand(myLD, ref status, "?PRESSURES", "ok");
                if (status == true)
                {
                    string[] response = retval.Split(new string[] { "(mTorr): ", "\r\nSpectrometer" }, StringSplitOptions.RemoveEmptyEntries);
                    string[] response2 = retval.Split(new string[] { "Spectrometer (uTorr): ", " ok\r\n" }, StringSplitOptions.RemoveEmptyEntries);
                    Pressure = Convert.ToInt32(response[1]);
                    System_pressure = Convert.ToInt32(response2[1]);
                    Trace.WriteLine(iteSlot + "Pressure: " + Pressure + "mTorr System Pressure: " + System_pressure + "uTorr");
                    //Verify the qualification of both pressure level
                    MyTestInfo.ResultsParams[step].Result = Pressure.ToString();
                    step++;
                    MyTestInfo.ResultsParams[step].Result = System_pressure.ToString();
                }
                else
                {
                    MyTestInfo.ResultsParams[step].Result = "FAILED";
                    throw new Exception("Test point failed.");
                }
                // DB to open back IMG 100n
                SQL_XGS.Update_XGS(myuutdata.Options, "No"); // release the XGS lock
                step++;
                //}
                break;
            }
            case "5.4.13 Vent to ATM":
            {
                step = 1;
                //@@ Vent to Atmosphere @@//
                Trace.WriteLine(iteSlot + "Vent the UUT to atmosphere...");
                status = Helper.DoThis(myLD, ref MyTestInfo, "VENT", "VENT ok", step, "ok");
                step++;
            checkagain:
                // Poll ?PRESSURES until the test port reads atmospheric (700000..760000 mTorr).
                retval = Helper.SendCommand(myLD, ref status, "?PRESSURES", "ok");
                string[] resp = retval.Split(new string[] { "(mTorr): ", "\r\nSpectrometer" }, StringSplitOptions.RemoveEmptyEntries);
                int Pressure = Convert.ToInt32(resp[1]);
                Trace.WriteLine(iteSlot + "Pressure: " + Pressure + "mTorr System Pressure: ");
                // commented out below manual limit checking, use test executive to do the limit checking and display the result correctly
                if (!(Pressure >= 700000 && Pressure <= 760000))
                {
                    Thread.Sleep(2000);
                    goto checkagain;
                }
                // YS wong added to make sure vented properly
                Thread.Sleep(1000);
                //@@ Wait for UUT to vent to atmosphere @@//
                Trace.WriteLine(iteSlot + "Wait for UUT to vent to atmosphere...");
                MyTestInfo.ResultsParams[step].Result = "ok";
                Trace.WriteLine(iteSlot + "Test point complete.");
                step++;
                //@@ Obtain pressure level @@//
                Trace.WriteLine(iteSlot + "Obtain and verify the TP pressure level...");
                retval = Helper.SendCommand(myLD, ref status, "?PRESSURES", "ok");
                if (status == true)
                {
                    string[] response = retval.Split(new string[] { "(mTorr): ", "\r\nSpectrometer" }, StringSplitOptions.RemoveEmptyEntries);
                    Pressure = Convert.ToInt32(response[1]);
                    Trace.WriteLine(iteSlot + "Pressure: " + Pressure + "mTorr");
                    MyTestInfo.ResultsParams[step].Result = Convert.ToString(Pressure);
                    Trace.WriteLine(iteSlot + "Test point complete.");
                }
                else
                {
                    MyTestInfo.ResultsParams[step].Result = "FAILED";
                    //throw new Exception("Test point failed.");
                }
                break;
            }
            case "5.4.16 ReloadUUT":
            {
                step = 1;
                //@@ wait for the UUT to respond with its 'wake up' prompt after sending command 'RELOAD' @@//
                Trace.WriteLine(iteSlot + "Reloading the UUT...");
                status = Helper.Reload(myLD);
                if (status == true)
                {
                    MyTestInfo.ResultsParams[step].Result = "ok";
                }
                else
                {
                    MyTestInfo.ResultsParams[step].Result = "FAILED";
                }
                step++;
                //@@ After reload, wait for system ready. if the system is ready, value '-1' will be returned, else '0' . The timeout is 3 mins. @@//
                Trace.WriteLine(iteSlot + "Wait for system to get ready...");
                status = Helper.Wait_SystemReady(myLD);
                if (status == true)
                {
                    MyTestInfo.ResultsParams[step].Result = "ok";
                }
                else
                {
                    MyTestInfo.ResultsParams[step].Result = "FAILED";
                }
                step++;
                break;
            }
            case "5.4.17 Final Verification":
            {
                step = 1;
                //@@ Access to full command @@//
                Trace.WriteLine(iteSlot + "Access to full command...");
                status = Helper.DoThis(myLD, ref MyTestInfo, "XYZZY", "ok", step, "ok");
                step++;
                //@@ Verify softstart state @@//
                Trace.WriteLine(iteSlot + "Verify the status of softstart...");
                status = Helper.DoThis(myLD, ref MyTestInfo, "?SOFTSTART", "ok", step, "ok");
                break;
            }
        }
    }
    catch (Exception ex)
    {
        // Record the failure against the current step, then let the caller see the exception.
        Trace.WriteLine(ex.Message);
        Helper.Fail_Test(ref MyTestInfo, ex.Message, step);
        throw;
    }
    return (MyTestInfo);
}
/// <summary>
/// Executes one step of manufacturing test sequence 5.8 against the leak detector,
/// selected by myTestInfo.TestLabel: operator-monitored leak readings with the
/// standard leak open/closed (5.8.3), final analog-output setup (5.8.6), and a
/// vent/rough valve-cycling exercise (5.8.7). Results are written into
/// myTestInfo.ResultsParams[step].
/// NOTE(review): relies on class-level members not visible in this chunk (comPort,
/// iteSlot, Stdleak, locX, locY) — confirm their semantics at the class.
/// </summary>
/// <param name="myLD">Open connection to the leak detector under test.</param>
/// <param name="myTestInfo">Current test definition; results are written into its ResultsParams.</param>
/// <param name="myuutdata">Unit-under-test data (not read in this method).</param>
/// <returns>myTestInfo with this step's results recorded.</returns>
public static TestInfo DoSeq5_8(VSLeakDetector myLD, ref TestInfo myTestInfo, ref UUTData myuutdata)
{
    Boolean status = false;
    string retval;
    int step = 1;               // 1-based index into ResultsParams
    Helper.comPort = comPort;
    try
    {
        switch (myTestInfo.TestLabel)
        {
            case "5.8.3 Leak_Reading":
            {
                step = 1;
                while (step <= myTestInfo.ResultsParams.NumResultParams)
                {
                    //Access to full command
                    Trace.WriteLine(iteSlot + "Access to full command...");
                    Helper.SendCommand(myLD, ref status, "XYZZY", "ok");
                    //Obtain stdleak reading
                    Trace.WriteLine(iteSlot + "Obtain the stdleak reading...");
                    retval = Helper.SendCommand(myLD, ref status, "?STDLEAK", "ok");
                    if (status == true)
                    {
                        string[] response = retval.Split(new string[] { "?STDLEAK ", "ok" }, StringSplitOptions.RemoveEmptyEntries);
                        Stdleak = Convert.ToDouble(response[0]);
                    }
                    else
                    {
                        // Sentinel value meaning "no reading available".
                        Stdleak = -999;
                    }
                    //@@ Read and observe the leak rate by using helium spray probe. A dialog will be shown to monitor the leak rate @@//
                    Trace.WriteLine(iteSlot + "Monitoring the UUT leak reading when STDLEAK CLOSED...");
                    Leak_Reading monitor_leak_reading = new Leak_Reading(myLD);
                    monitor_leak_reading.StartPosition = FormStartPosition.Manual;
                    monitor_leak_reading.Location = new Point(locX, locY);
                    Leak_Reading.Stdleak_status = "CLOSE";
                    monitor_leak_reading.ShowDialog();
                    if (monitor_leak_reading.DialogResult == DialogResult.Yes)
                    {
                        monitor_leak_reading.Hide();
                        Trace.WriteLine(iteSlot + "Test point complete.");
                        myTestInfo.ResultsParams[step].Result = "ok";
                        step++;
                    }
                    else if (monitor_leak_reading.DialogResult == DialogResult.No)
                    {
                        monitor_leak_reading.Dispose();
                        myTestInfo.ResultsParams[step].Result = "FAILED";
                        //throw new Exception("Test point failed.");
                    }
                    //@@ Open the Stdleak @@//
                    Trace.WriteLine(iteSlot + "Opening the stdleak...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "STDLEAK", "ok", step, "ok");
                    Thread.Sleep(12000); //(MOD: Time is required to fully open the stdleak contained inside the UUT. In order to prevent any interruption while it is opening, a small interval must be implemented.
                    step++;
                    //@@ Read and observe the stdleak rate by using helium spray probe. A dialog will be shown to monitor the leak rate @@//
                    Trace.WriteLine(iteSlot + "Monitoring the UUT leak reading when STDLEAK OPEN...");
                    Leak_Reading.Stdleak_status = "OPEN";
                    // NOTE(review): this re-shows the same dialog instance; if the "No" branch above already
                    // called Dispose(), ShowDialog() here would throw ObjectDisposedException — confirm intended flow.
                    monitor_leak_reading.ShowDialog();
                    if (monitor_leak_reading.DialogResult == DialogResult.Yes)
                    {
                        monitor_leak_reading.Dispose();
                        Trace.WriteLine(iteSlot + "Test point complete.");
                        myTestInfo.ResultsParams[step].Result = "ok";
                        step++;
                    }
                    else if (monitor_leak_reading.DialogResult == DialogResult.No)
                    {
                        monitor_leak_reading.Dispose();
                        myTestInfo.ResultsParams[step].Result = "FAILED";
                        //throw new Exception("Test point failed.");
                    }
                    //@@ Close the Stdleak @@//
                    Trace.WriteLine(iteSlot + "Closing the stdleak...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "STDLEAK", "ok", step, "ok");
                    step++;
                }
                break;
            }
            case "5.8.6 Final_setup":
            {
                step = 1;
                //@@ Sets the leak rate analog output voltage to Linear. Not preceded by a value. @@//
                Trace.WriteLine(iteSlot + "Sets the leak rate analog output voltage to linear...");
                status = Helper.DoThis(myLD, ref myTestInfo, "INIT-LINEAR", "ok", step, "ok");
                step++;
                //@@ Sets the status of Auto-zero < 0. Preceded by 0 or 1, 0 = OFF, 1 = ON. @@//
                Trace.WriteLine(iteSlot + "Sets the status of Auto Zero to ON...");
                status = Helper.DoThis(myLD, ref myTestInfo, "1 INIT-AZ<0", "ok", step, "ok");
                break;
            }
            case "5.8.7 Valve_cycle":
            {
                step = 1;
                while (step <= myTestInfo.ResultsParams.NumResultParams)
                {
                    //@@ Access to full command @@//
                    Trace.WriteLine(iteSlot + "Access to full command...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "XYZZY", "ok", step, "ok");
                    step++;
                    //Read the stdleak rate
                    Trace.WriteLine(iteSlot + "Obtain the stdleak reading...");
                    retval = Helper.SendCommand(myLD, ref status, "?STDLEAK", "ok");
                    if (status == true)
                    {
                        string[] response = retval.Split(new string[] { "?STDLEAK ", "ok" }, StringSplitOptions.RemoveEmptyEntries);
                        Stdleak = Convert.ToDouble(response[0]);
                    }
                    //@@ Vent the UUT @@//
                    Trace.WriteLine(iteSlot + "Venting the UUT...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "VENT", "ok", step, "ok");
                    step++;
                checkagain:
                    // Poll ?PRESSURES until the test port reads atmospheric (700000..760000 mTorr).
                    retval = Helper.SendCommand(myLD, ref status, "?PRESSURES", "ok");
                    string[] resp = retval.Split(new string[] { "(mTorr): ", "\r\nSpectrometer" }, StringSplitOptions.RemoveEmptyEntries);
                    int Pressure = Convert.ToInt32(resp[1]);
                    Trace.WriteLine(iteSlot + "Pressure: " + Pressure + "mTorr System Pressure: ");
                    // commented out below manual limit checking, use test executive to do the limit checking and display the result correctly
                    if (!(Pressure >= 700000 && Pressure <= 760000))
                    {
                        Thread.Sleep(2000);
                        goto checkagain;
                    }
                    // YS Wong added to make sure vented properly
                    Thread.Sleep(1000);
                    //@@ Rough the UUT @@//
                    Trace.WriteLine(iteSlot + "Roughing the UUT...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "ROUGH", "ok", step, "ok");
                    Thread.Sleep(30000);
                    //YS Wong
                    string update;
                    int counter = 1;
                recheck:
                    // Poll for MIDSTAGE valve state, up to 10 attempts ~2 s apart.
                    update = Helper.SendCommand(myLD, ref status, "?VALVESTATE", "MIDSTAGE");
                    if (!update.Contains("MIDSTAGE"))
                    {
                        Thread.Sleep(2000);
                        update = null;
                        counter++;
                        if (counter > 10)
                        {
                            // Give up after ~20 s of polling; this exits the surrounding while loop.
                            break;
                        }
                        else
                        {
                            goto recheck;
                        }
                    }
                    step++;
                    update = null; // YS Wong
                    //@@ STDleak ON @@//
                    Trace.WriteLine(iteSlot + "Open the stdleak...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "STDLEAK", "ok", step, "ok");
                    step++;
                    Thread.Sleep(10000);
                    //@@ Vent the UUT @@//
                    Trace.WriteLine(iteSlot + "Venting the UUT...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "VENT", "ok", step, "ok");
                checkagain2:
                    retval = Helper.SendCommand(myLD, ref status, "?PRESSURES", "ok");
                    string[] resp2 = retval.Split(new string[] { "(mTorr): ", "\r\nSpectrometer" }, StringSplitOptions.RemoveEmptyEntries);
                    int Pressure2 = Convert.ToInt32(resp2[1]);
                    // NOTE(review): Pressure2 is parsed here but the trace and range check below still use
                    // the stale 'Pressure' from the first vent — looks like a copy/paste bug; confirm intent.
                    Trace.WriteLine(iteSlot + "Pressure: " + Pressure + "mTorr System Pressure: ");
                    // commented out below manual limit checking, use test executive to do the limit checking and display the result correctly
                    if (!(Pressure >= 700000 && Pressure <= 760000))
                    {
                        Thread.Sleep(2000);
                        goto checkagain2;
                    }
                    // Hairus added to make sure vented properly
                    //myLD.Open();
                    Thread.Sleep(1000);
                    step++;
                    //@@ Rough the UUT @@//
                    Trace.WriteLine(iteSlot + "Roughing the UUT...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "ROUGH", "ok", step, "ok");
                    Thread.Sleep(30000);
                    // YS Wong
                    update = null;
                    counter = 1;
                recheck2:
                    update = Helper.SendCommand(myLD, ref status, "?VALVESTATE", "MIDSTAGE");
                    if (!update.Contains("MIDSTAGE"))
                    {
                        Thread.Sleep(2000);
                        update = null;
                        counter++;
                        if (counter > 10)
                        {
                            break;
                        }
                        else
                        {
                            goto recheck2;
                        }
                    }
                    step++;
                    update = null; // YS Wong
                    //@@ STDleak OFF @@//
                    Trace.WriteLine(iteSlot + "Close the stdleak...");
                    status = Helper.DoThis(myLD, ref myTestInfo, "STDLEAK", "ok", step, "ok");
                    step++;
                    Thread.Sleep(30000);
                    // After valve cycle test completed, wait until the leak rate stabilize before proceed for calibration
                    //VSLeakDetector myLD = new VSLeakDetector(comPort);
                    // myLD.iteSlot = iteSlot;
                    string myRetVal = "";
                    //myLD.Open();
                    myLD.WaitForStdLeakState(ref myRetVal);
                    Thread.Sleep(1000);
                    bool isStabilized = myLD.WaitForStabilizedReading(ref myRetVal, 60, 0.97, 10);
                    if (isStabilized)
                    {
                        Trace.WriteLine(iteSlot + "Std Leak reading is now stabilized");
                    }
                    else
                    {
                        Trace.WriteLine(iteSlot + "Unable to get stable leak rate reading");
                    }
                    //myLD.Close();
                    //myLD = null;
                }
                break;
            }
        }
    }
    catch (Exception ex)
    {
        // Record the failure against the current step, then let the caller see the exception.
        Trace.WriteLine(ex.Message);
        Helper.Fail_Test(ref myTestInfo, ex.Message, step);
        throw;
    }
    return (myTestInfo);
}
private void LoadTreeViewCategory(XmlReader reader, TreeViewItem categoryItem) { int itemCount = 0; while (reader.Read()) { if (reader.NodeType == XmlNodeType.Element) { if (String.Equals(reader.Name, "test", StringComparison.OrdinalIgnoreCase)) { TestInfo testInfo = new TestInfo(reader); if (!testInfo.IsEmpty) { TextBlock headerText = new TextBlock(); headerText.Text = String.Format("({0:D3}) - {1}", itemCount, testInfo.Title); headerText.Margin = new Thickness(3, 0, 0, 0); Ellipse bullet = new Ellipse(); bullet.Height = 16; bullet.Width = 16; bullet.Fill = testInfo.StateBrush; bullet.Stroke = Brushes.DarkGray; bullet.StrokeThickness = 1; BulletDecorator decorator = new BulletDecorator(); decorator.Bullet = bullet; decorator.Margin = new Thickness(0, 0, 10, 0); decorator.Child = headerText; TreeViewItem treeItem = new TreeViewItem(); treeItem.Header = decorator; treeItem.Padding = new Thickness(3); treeItem.FontSize = 12; treeItem.FontWeight = FontWeights.Normal; treeItem.Tag = testInfo; categoryItem.Items.Add(treeItem); itemCount++; } } } else if (reader.NodeType == XmlNodeType.EndElement) { if (String.Equals(reader.Name, "category", StringComparison.OrdinalIgnoreCase)) { break; } } } }
/// <summary>
/// Helper function to the BeforeExpand method. Takes a test info object and returns a
/// collection of nodes representing the Compare nodes in the report for that test.
/// Implemented as an iterator: nodes are yielded lazily as the report is parsed.
/// </summary>
/// <param name="testInfo">struct containing information on a specific test.</param>
/// <returns>a collection of TreeNodes representing the Compare nodes in the report for that test</returns>
private IEnumerable<FailStateNode> GetTestCompareNodes(TestInfo testInfo)
{
    string result = testInfo.Result;
    bool missingBaseReport;
    // Only report-diff or missing-baseline results carry Compare nodes worth expanding.
    if ((missingBaseReport = result == MissingReportBaseline) || result == ReportDiffgram)
    {
        ReportAssembly reportAssembly = GetReportAssembly(testInfo);
        XmlReaderSettings readerSettings = new XmlReaderSettings();
        readerSettings.CloseInput = true;
        //The nodes returned are based on the Compare nodes found in the result report file.
        //therefore, it is necessary to obtain this report file.
        Assembly assembly = reportAssembly.Assembly;
        // reportStream is deliberately null in the missing-baseline path: the report text
        // comes from FindMissingReport instead of from patching the embedded resource.
        using (Stream reportStream = missingBaseReport ? null : new MemoryStream())
        {
            string rawMissingReport = null;
            if (missingBaseReport)
            {
                rawMissingReport = FindMissingReport(testInfo, reportAssembly, false);
                // The report itself is always a failure when the baseline is missing.
                FailStateNode reportNode = new FailingCompareNode(ReportFileName, true);
                yield return reportNode;
            }
            else
            {
                // Rebuild the actual report by applying the stored diffgram (testInfo.Content)
                // to the embedded baseline report resource.
                using (XmlReader reportReader = XmlReader.Create(assembly.GetManifestResourceStream(testInfo.BaseFileName + ".Report.xml"), readerSettings))
                {
                    using (XmlReader diffReader = XmlReader.Create(new StringReader(testInfo.Content), readerSettings))
                    {
                        new XmlPatch().Patch(reportReader, reportStream, diffReader);
                        reportStream.Position = 0;
                    }
                }
                // Either way, reportStream now contains a TestReport XML file with at least 1 Compare node.
                // Check it against a 'faked pass', to determine if the report node is a pass or a failure.
                // CloseInput toggled off so the readers below do not dispose reportStream prematurely.
                readerSettings.CloseInput = false;
                using (MemoryStream fakeReportStream = new MemoryStream())
                {
                    using (XmlReader reportReader = XmlReader.Create(reportStream, readerSettings))
                    {
                        XmlWriterSettings writerSettings = new XmlWriterSettings();
                        writerSettings.CloseOutput = false;
                        using (XmlWriter fakeReportWriter = XmlWriter.Create(fakeReportStream, writerSettings))
                        {
                            FakePassTransform.Transform(reportReader, fakeReportWriter);
                        }
                    }
                    // Rewind both streams before comparing the faked-pass report to the baseline.
                    fakeReportStream.Position = 0;
                    reportStream.Position = 0;
                    readerSettings.CloseInput = true;
                    using (XmlReader reportReader = XmlReader.Create(assembly.GetManifestResourceStream(testInfo.BaseFileName + ".Report.xml"), readerSettings))
                    {
                        using (XmlReader fakeReportReader = XmlReader.Create(fakeReportStream, readerSettings))
                        {
                            // Equal (ignoring declaration/prefixes/whitespace/comments/PIs) means
                            // the report as a whole passed; otherwise it is a failing node.
                            yield return new XmlDiff(XmlDiffOptions.IgnoreXmlDecl | XmlDiffOptions.IgnorePrefixes | XmlDiffOptions.IgnoreWhitespace | XmlDiffOptions.IgnoreComments | XmlDiffOptions.IgnorePI).Compare(fakeReportReader, reportReader) ? new PassingCompareNode(ReportFileName) as FailStateNode : new FailingCompareNode(ReportFileName, false);
                        }
                    }
                }
            }
            //Next, create a treenode object for each compare node in the XML.
            readerSettings.CloseInput = true;
            using (XmlReader testResultReader = missingBaseReport ? XmlReader.Create(new StringReader(rawMissingReport), readerSettings) : XmlReader.Create(reportStream, readerSettings))
            {
                while (testResultReader.Read())
                {
                    if (testResultReader.IsStartElement() && testResultReader.LocalName == "Compare")
                    {
                        string name = testResultReader.GetAttribute("name");
                        string testResult = testResultReader.GetAttribute("result");
                        // Unnamed compares become "Compare.orm"; named ones "Compare.<name>.orm".
                        string testName = string.Format("Compare{0}.orm", name == null ? "" : "." + name);
                        yield return testResult == TestPassed ? new PassingCompareNode(testName) as FailStateNode : new FailingCompareNode(testName, testResult == MissingBaseline);
                    }
                }
            }
        }
    }
}
/// <summary> /// Handles logic for nodes selection /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void tvTestCases_AfterCheck(object sender, TreeViewEventArgs e) { TreeNode node = e.Node; // Add/ test to selected tests or remove or handle group selection if (node.Tag != null) { // node represents test TestInfo testInfo = (TestInfo)node.Tag; if (node.Checked) { // add test if (!_selectedTests.Contains(testInfo)) { _selectedTests.Add(testInfo); } } else { // remove tests _selectedTests.Remove(testInfo); } } else { // // If profile is being applied, don't check/uncheck child nodes. // If certification mode is being entered, don't check/uncheck child nodes - in this case we check // groups depending on tests selected. // If a group is selected/unselected depending on child nodes state, don't propogate selection // in opposite direction. if (!_bProfileBeingApplied && !_bubbleCheck) { // in certification mode, features selected should be considered. if (_certificationMode) { SelectAvailableChildTests(node); CheckIfAllChildrenChecked(node); } else { // else select all child nodes. foreach (TreeNode child in node.Nodes) { child.Checked = node.Checked; } } } // track group selection (for saving profile) if (node.Checked) { if (!_selectedGroups.Contains(node.Name)) { _selectedGroups.Add(node.Name); } } else { _selectedGroups.Remove(node.Name); } } // if node is selected by a user, check if parent state should be updated. if (e.Action == TreeViewAction.ByKeyboard || e.Action == TreeViewAction.ByMouse) { if (node.Checked) { TryCheckParent(node); } else { UncheckParent(node); } } tsbRunAll.Enabled = !_controller.Running && _selectedTests.Count > 0; }
/// <summary>
/// Integration test: asynchronously uploads ten generated 5 MB files over SFTP,
/// downloads them back, and verifies that the byte counts match and the MD5 hashes
/// of the round-tripped files equal those of the originals. Requires a live SFTP
/// server configured via Resources.HOST / USERNAME / PASSWORD.
/// </summary>
public void Test_Sftp_Multiple_Async_Upload_And_Download_10Files_5MB_Each()
{
    var maxFiles = 10;  // number of files to round-trip
    var maxSize = 5;    // size of each generated file, in MB (per CreateTestFile)
    using (var sftp = new SftpClient(Resources.HOST, Resources.USERNAME, Resources.PASSWORD))
    {
        sftp.Connect();

        // Prepare local temp files and their expected hashes, keyed by remote name.
        var testInfoList = new Dictionary<string, TestInfo>();
        for (int i = 0; i < maxFiles; i++)
        {
            var testInfo = new TestInfo();
            testInfo.UploadedFileName = Path.GetTempFileName();
            testInfo.DownloadedFileName = Path.GetTempFileName();
            testInfo.RemoteFileName = Path.GetRandomFileName();
            this.CreateTestFile(testInfo.UploadedFileName, maxSize);
            // Calculate hash value
            testInfo.UploadedHash = CalculateMD5(testInfo.UploadedFileName);
            testInfoList.Add(testInfo.RemoteFileName, testInfo);
        }

        // NOTE(review): uploadWaitHandles is populated but never waited on — completion is
        // detected by the IsCompleted polling loop below; consider WaitHandle.WaitAll instead.
        var uploadWaitHandles = new List<WaitHandle>();
        // Start file uploads
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            testInfo.UploadedFile = File.OpenRead(testInfo.UploadedFileName);
            testInfo.UploadResult = sftp.BeginUploadFile(testInfo.UploadedFile, remoteFile, null, null) as SftpUploadAsyncResult;
            uploadWaitHandles.Add(testInfo.UploadResult.AsyncWaitHandle);
        }

        // Wait for upload to finish (poll every 500 ms until all IAsyncResults complete).
        bool uploadCompleted = false;
        while (!uploadCompleted)
        {
            // Assume upload completed
            uploadCompleted = true;
            foreach (var testInfo in testInfoList.Values)
            {
                var sftpResult = testInfo.UploadResult;
                if (!testInfo.UploadResult.IsCompleted)
                {
                    uploadCompleted = false;
                }
            }
            Thread.Sleep(500);
        }

        // End file uploads
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            sftp.EndUploadFile(testInfo.UploadResult);
            testInfo.UploadedFile.Dispose();
        }

        // Start file downloads
        // NOTE(review): downloadWaitHandles is likewise collected but never used.
        var downloadWaitHandles = new List<WaitHandle>();
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            testInfo.DownloadedFile = File.OpenWrite(testInfo.DownloadedFileName);
            testInfo.DownloadResult = sftp.BeginDownloadFile(remoteFile, testInfo.DownloadedFile, null, null) as SftpDownloadAsyncResult;
            downloadWaitHandles.Add(testInfo.DownloadResult.AsyncWaitHandle);
        }

        // Wait for download to finish (same 500 ms polling scheme as uploads).
        bool downloadCompleted = false;
        while (!downloadCompleted)
        {
            // Assume download completed
            downloadCompleted = true;
            foreach (var testInfo in testInfoList.Values)
            {
                var sftpResult = testInfo.DownloadResult;
                if (!testInfo.DownloadResult.IsCompleted)
                {
                    downloadCompleted = false;
                }
            }
            Thread.Sleep(500);
        }

        var hashMatches = true;
        var uploadDownloadSizeOk = true;
        // End file downloads, then verify byte counts and content hashes per file.
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            sftp.EndDownloadFile(testInfo.DownloadResult);
            testInfo.DownloadedFile.Dispose();
            testInfo.DownloadedHash = CalculateMD5(testInfo.DownloadedFileName);
            if (!(testInfo.UploadResult.UploadedBytes > 0 && testInfo.DownloadResult.DownloadedBytes > 0 && testInfo.DownloadResult.DownloadedBytes == testInfo.UploadResult.UploadedBytes))
            {
                uploadDownloadSizeOk = false;
            }
            if (!testInfo.DownloadedHash.Equals(testInfo.UploadedHash))
            {
                hashMatches = false;
            }
        }

        // Clean up after test
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            sftp.DeleteFile(remoteFile);
            File.Delete(testInfo.UploadedFileName);
            File.Delete(testInfo.DownloadedFileName);
        }

        sftp.Disconnect();

        Assert.IsTrue(hashMatches, "Hash does not match");
        Assert.IsTrue(uploadDownloadSizeOk, "Uploaded and downloaded bytes does not match");
    }
}
/// <summary> /// Selects tests which currently is being executed. /// </summary> /// <param name="testInfo">Test information.</param> void HighlightActiveTest(TestInfo testInfo) { TreeNode node = _testNodes[testInfo]; tvTestCases.SelectedNode = node; }
// Load list of tests to be run into TestList.
//
// Test selection comes from one of two mutually exclusive command-line args:
//   - "test": an explicit list of test names;
//   - "form": a list of Forms, mapped to covering tests via FormLookup.
// A "skip" arg removes tests by name. When tests were named explicitly, their
// requested order is preserved (testDict/testArray) before the final sort.
private static List <TestInfo> LoadTestList(CommandLineArgs commandLineArgs)
{
    List <string> testNames;
    var testList = new List <TestInfo>();

    var formArg = commandLineArgs.ArgAsString("form");

    // Load lists of tests to run.
    if (string.IsNullOrEmpty(formArg))
    {
        testNames = LoadList(commandLineArgs.ArgAsString("test"));
    }
    // Find which tests best cover the desired forms.
    else
    {
        var formLookup = new FormLookup();
        List <string> uncoveredForms;
        testNames = formLookup.FindTests(LoadList(formArg), out uncoveredForms);
        if (uncoveredForms.Count > 0)
        {
            MessageBox.Show("No tests found to show these Forms: " + string.Join(", ", uncoveredForms), "Warning");
            return(testList); // empty list — nothing runnable
        }
    }

    // Maintain order in list of explicitly specified tests: map each requested
    // name to its position so discovered tests can be slotted back in order.
    var testDict = new Dictionary <string, int>();
    for (int i = 0; i < testNames.Count; i++)
    {
        if (testDict.ContainsKey(testNames[i]))
        {
            MessageBox.Show("Duplicate test name: " + testNames[i]);
            throw new ArgumentException("Duplicate test name: " + testNames[i]);
        }
        testDict.Add(testNames[i], i);
    }

    var testArray = new TestInfo[testNames.Count];
    var skipList = LoadList(commandLineArgs.ArgAsString("skip"));

    // Find tests in the test dlls. A test matches either by its fully
    // qualified "Class.Method" name or by the bare method name.
    foreach (var testDll in TEST_DLLS)
    {
        foreach (var testInfo in RunTests.GetTestInfos(testDll))
        {
            var testName = testInfo.TestClassType.Name + "." + testInfo.TestMethod.Name;
            if (testNames.Count == 0 || testNames.Contains(testName) || testNames.Contains(testInfo.TestMethod.Name))
            {
                if (!skipList.Contains(testName) && !skipList.Contains(testInfo.TestMethod.Name))
                {
                    if (testNames.Count == 0)
                    {
                        // No explicit list: take every discovered, non-skipped test.
                        testList.Add(testInfo);
                    }
                    else
                    {
                        // Explicit list: place the test at its requested position.
                        string lookup = testNames.Contains(testName) ? testName : testInfo.TestMethod.Name;
                        testArray[testDict[lookup]] = testInfo;
                    }
                }
            }
        }
    }
    if (testNames.Count > 0)
    {
        // Drop empty slots left by requested tests that were never found.
        testList.AddRange(testArray.Where(testInfo => testInfo != null));
    }

    // Sort tests alphabetically, but run perf tests last for best coverage in a fixed amount of time.
    return(testList.OrderBy(e => e.IsPerfTest).ThenBy(e => e.TestMethod.Name).ToList());
}
public TestInfo HelloApi(TestInfo info) { return(info); }
/// <summary>
/// Runs a LoadRunner scenario and collects its results for JUnit-style reporting.
/// Flow: prepare a temp results folder, run the scenario via the Controller,
/// close the Controller, generate an Analysis report, then classify errors into
/// ignorable vs. fatal to decide Passed/Warning/Failed.
/// </summary>
/// <param name="scenarioInf">Test information; TestPath points at the scenario file.</param>
/// <param name="errorReason">Receives a failure description on error.</param>
/// <param name="runCancelled">Delegate used to honor run cancellation from here on.</param>
/// <returns>The populated run-results descriptor.</returns>
public TestRunResults RunTest(TestInfo scenarioInf, ref string errorReason, RunCancelledDelegate runCancelled)
{
    string scenarioPath = scenarioInf.TestPath;
    //prepare the instance that will contain test results for JUnit
    TestRunResults runDesc = new TestRunResults();

    ConsoleWriter.ActiveTestRun = runDesc;
    ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running: " + scenarioPath);

    runDesc.TestType = TestType.LoadRunner.ToString();
    _resultsFolder = Helper.GetTempDir();

    //a directory with this name may already exist. try to delete it.
    if (Directory.Exists(_resultsFolder))
    {
        try
        {
            // Directory.Delete(_resultsFolder, true);
            // Empty the folder instead of deleting it outright.
            // NOTE(review): subdir.Delete() is non-recursive and will throw on
            // non-empty subdirectories — the catch below then leaves leftovers.
            DirectoryInfo dir = new DirectoryInfo(_resultsFolder);
            dir.GetFiles().ToList().ForEach(file => file.Delete());
            dir.GetDirectories().ToList().ForEach(subdir => subdir.Delete());
        }
        catch (Exception)
        {
            // Best effort: report and continue with whatever is left in the folder.
            Console.WriteLine(string.Format(Resources.CannotDeleteReportFolder, _resultsFolder));
        }
    }
    else
    {
        try
        {
            Directory.CreateDirectory(_resultsFolder);
        }
        catch (Exception e)
        {
            // Cannot create the temp results dir — fail the run immediately.
            errorReason = string.Format(Resources.FailedToCreateTempDirError, _resultsFolder);
            runDesc.TestState = TestState.Error;
            runDesc.ErrorDesc = errorReason;
            Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed;
            return runDesc;
        }
    }

    //create LRR folder:
    _controller_result_dir = Path.Combine(_resultsFolder, LRR_FOLDER);
    Directory.CreateDirectory(_controller_result_dir);

    //init result params
    runDesc.ErrorDesc = errorReason;
    runDesc.TestPath = scenarioPath;
    runDesc.TestState = TestState.Unknown;

    if (!Helper.isLoadRunnerInstalled())
    {
        runDesc.TestState = TestState.Error;
        runDesc.ErrorDesc = string.Format(Resources.LoadRunnerNotInstalled, System.Environment.MachineName);
        ConsoleWriter.WriteErrLine(runDesc.ErrorDesc);
        Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed;
        return runDesc;
    }

    //from here on, we may delegate runCancelled().
    _runCancelled = runCancelled;

    //start scenario stop watch
    Stopwatch scenarioStopWatch = Stopwatch.StartNew();

    //set state to running
    runDesc.TestState = TestState.Running;

    //and run the scenario
    bool res = runScenario(scenarioPath, ref errorReason, runCancelled);

    if (!res)
    {
        //runScenario failed. print the error and set test as failed
        ConsoleWriter.WriteErrLine(errorReason);
        runDesc.TestState = TestState.Error;
        runDesc.ErrorDesc = errorReason;
        runDesc.Runtime = scenarioStopWatch.Elapsed;
        //and try to close the controller
        closeController();
        return runDesc;
    }
    else
    {
        try
        {
            ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator);
            runDesc.ReportLocation = _resultsFolder;
            ConsoleWriter.WriteLine(Resources.LrAnalysingResults);

            //close the controller, so Analysis can be opened
            ConsoleWriter.WriteLine("closing Controller");
            closeController();
            ConsoleWriter.WriteLine("Controller closed");

            //generate report using Analysis:
            ConsoleWriter.WriteLine("calling analysis report generator");
            generateAnalysisReport(runDesc);
            ConsoleWriter.WriteLine("analysis report generator finished");

            //check for errors:
            if (File.Exists(Path.Combine(_resultsFolder, "errors.xml")))
            {
                checkForErrors();
            }

            ConsoleWriter.WriteLine(Resources.LRErrorsSummary);

            //count how many ignorable errors and how many fatal errors occured.
            int ignore = getErrorsCount(ERRORState.Ignore);
            int fatal = getErrorsCount(ERRORState.Error);
            ConsoleWriter.WriteLine(String.Format(Resources.LrErrorSummeryNum, ignore, fatal));
            ConsoleWriter.WriteLine("");
            if (_errors != null && _errors.Count > 0)
            {
                foreach (ERRORState state in Enum.GetValues(typeof(ERRORState)))
                {
                    ConsoleWriter.WriteLine(printErrorSummary(state));
                }
            }

            //if scenario ended with fatal errors, change test state
            if (fatal > 0)
            {
                ConsoleWriter.WriteErrLine(string.Format(Resources.LRTestFailDueToFatalErrors, fatal));
                errorReason = buildErrorReasonForErrors();
                runDesc.TestState = TestState.Failed;
            }
            else if (ignore > 0)
            {
                // Ignorable errors only: mark as a warning, not a failure.
                ConsoleWriter.WriteLine(string.Format(Resources.LRTestWarningDueToIgnoredErrors, ignore));
                runDesc.HasWarnings = true;
                runDesc.TestState = TestState.Warning;
            }
            else
            {
                Console.WriteLine(Resources.LRTestPassed);
                runDesc.TestState = TestState.Passed;
            }
        }
        catch (Exception e)
        {
            // Analysis stage failed — record the error but fall through to the
            // common cleanup below.
            ConsoleWriter.WriteException(Resources.LRExceptionInAnalysisRunner, e);
            runDesc.TestState = TestState.Error;
            runDesc.ErrorDesc = Resources.LRExceptionInAnalysisRunner;
            runDesc.Runtime = scenarioStopWatch.Elapsed;
        }

        //runDesc.ReportLocation = _resultsFolder;
    }

    // Common epilogue: record total runtime and make sure the controller is closed.
    runDesc.Runtime = scenarioStopWatch.Elapsed;
    if (!string.IsNullOrEmpty(errorReason))
        runDesc.ErrorDesc = errorReason;
    closeController();
    return runDesc;
}
// Executes one "uniform" group of tests — tests sharing the same state group
// and support-file group. With code coverage enabled every test runs in its
// own process; otherwise tests are bucketized so that those sharing a driver
// executable and SecurityLevel can share an AppDomain. STI-driven groups are
// run together via ExecuteSti; all other buckets must contain exactly one
// test and run via ExecuteTest.
private static void ExecuteUniformTestGroup(ExecutionSettings settings, List <TestRecord> uniformTestGroup, ExecutionGroupRecord fileGroupRecord, int stateGroupIndex, int supportFileGroupIndex, ExecutionComponents components)
{
    string groupPath = settings.DetermineGroupPath(stateGroupIndex, supportFileGroupIndex);
    DirectoryInfo executionDirectory = settings.DetermineTestExecutionDirectory(groupPath);
    // NOTE(review): executionLabel appears unused in the rest of this method —
    // confirm before removing.
    string executionLabel = "(" + stateGroupIndex + "," + supportFileGroupIndex + ")";

    IEnumerable <List <TestRecord> > testGroups;
    if (settings.CodeCoverageEnabled) // When using Code Coverage, we do not group test Appdomains together
    {
        testGroups = ExecutionGrouper.MakeGroupPerTest(uniformTestGroup);
    }
    else //Normal logic: Bucketize based on support for Shared App domains and driver
    {
        testGroups = ExecutionGrouper.Bucketize(
            uniformTestGroup,
            ExecutionGroupingLevel.SharedAppDomains,
            x => x.TestInfo.Driver.Executable + x.TestInfo.DriverParameters["SecurityLevel"]);
    }

    int testCount = 0;
    foreach (List <TestRecord> tests in testGroups)
    {
        ExecutionGroupRecord appDomainRecord = ExecutionGroupRecord.Begin(ExecutionGroupType.AppDomain, fileGroupRecord.Area);
        fileGroupRecord.ExecutionGroupRecords.Add(appDomainRecord);
        // The first test is representative for driver/area decisions for the bucket.
        TestInfo first = tests.First().TestInfo;

        //Tests which allow grouping & use STI get to be run as a group during normal runs.
        //All tests must run separately with Code coverage runs.
        bool runAsGroup = !settings.CodeCoverageEnabled && first.Driver.Executable.Equals("Sti.exe", StringComparison.OrdinalIgnoreCase) && first.ExecutionGroupingLevel >= ExecutionGroupingLevel.SharedAppDomains;

        TimeSpan processDuration;
        if (runAsGroup)//STI gets special treatment
        {
            ExecutionEventLog.RecordStatus(string.Format(CultureInfo.InvariantCulture, "Starting Shared AppDomain Test Process #{0}-{1}", groupPath, testCount));
            DirectoryInfo testLogDirectory = settings.DetermineTestLogDirectory(settings.DetermineGroupPath(stateGroupIndex, supportFileGroupIndex));
            processDuration = ExecuteSti(settings, tests, components, executionDirectory, testLogDirectory);
            ExecutionEventLog.RecordStatus(string.Format(CultureInfo.InvariantCulture, "Finished Shared AppDomain Test Process #{0}-{1}", groupPath, testCount));
        }
        else
        {
            ExecutionEventLog.RecordStatus(string.Format(CultureInfo.InvariantCulture, "Starting Test Process #{0}-{4} Runtests.cmd /Area={1} /Name={3} /Subarea={2}", groupPath, first.Area, first.SubArea, first.Name, testCount));
            // Non-STI buckets are expected to hold exactly one test each.
            if (tests.Count > 1)
            {
                throw new InvalidDataException("[Infra bug]Tests should be hashed into lists of individual tests.");
            }
            DirectoryInfo testLogDirectory = settings.DetermineTestLogDirectory(settings.DetermineGroupPath(stateGroupIndex, supportFileGroupIndex, testCount));
            PrepLogDirectory(testLogDirectory);//HACK: Ideally logging can guarantee this in final configuration.
            processDuration = ExecuteTest(settings, tests.First(), components, executionDirectory, testLogDirectory);
            ExecutionEventLog.RecordStatus(string.Format(CultureInfo.InvariantCulture, "Finished Test Process #{0}-{4} /Area={1} /Name={3} /Subarea={2}", groupPath, first.Area, first.SubArea, first.Name, testCount));
        }
        // Attribute the process wall-clock cost across the tests it ran.
        Reporting.ReportingUtilities.ApplyProcessCost(tests, processDuration);
        appDomainRecord.End();
        testCount++;
    }
}
/// <summary> /// Initializes a new instance of the MainViewModel class. /// </summary> public MainViewModel() { TestInfo = new TestInfo(); StudentInfo = new StudentInfo(); Test = new Test(TestInfo); }
private string FindMissingReport(TestInfo testInfo, ReportAssembly reportAssembly, bool fakeComparePass) { string content = null; foreach (TestClass testclass in reportAssembly.TestClasses) { if (testclass.Name != testInfo.TestClassName || testclass.TestNamespace != testInfo.TestNamespace) { continue; } foreach (Test test in testclass.Tests) { if (test.Name != testInfo.TestName) { continue; } content = testInfo.Content; break; } break; } if (content == null) { return "XML failure: Report File Not Found."; } if (!fakeComparePass) { return content; } XmlWriterSettings writerSettings = new XmlWriterSettings(); writerSettings.Indent = true; writerSettings.IndentChars = "\t"; writerSettings.Encoding = Encoding.UTF8; using (MemoryStream memoryStream = new MemoryStream(content.Length * 2)) { using (XmlWriter writer = XmlWriter.Create(memoryStream, writerSettings)) { using (XmlReader reader = XmlReader.Create(new StringReader(content))) { // As this is an expected report, it is expected to pass! // Apply the fake pass template. XslCompiledTransform transform = FakePassTransform; transform.Transform(reader, writer); using (MemoryStream formatStream = new MemoryStream((int)memoryStream.Position)) { memoryStream.Position = 0; using (XmlReader formatReader = XmlReader.Create(memoryStream)) { using (XmlWriter formatWriter = XmlWriter.Create(formatStream, writerSettings)) { FormatXml(formatReader, formatWriter); } } formatStream.Position = 0; return new StreamReader(formatStream).ReadToEnd(); } } } } }
// Reflection smoke test: builds a "TestInfo" instance entirely through the
// reflection helpers (CreateClass / SetValue / CreateList), populates a nested
// List<TestInfoTwo>, assigns it back, and dumps the result via Debug.LogError.
public static void TestReflection2()
{
    // Create the target object by type name and set its scalar properties from
    // strings; SetValue performs the per-type conversion named by the last arg.
    object obj = CreateClass("TestInfo");
    PropertyInfo info = obj.GetType().GetProperty("Id");
    SetValue(info, obj, "21", "int");
    //info.SetValue(obj, System.Convert.ToInt32("20"));
    PropertyInfo nameInfo = obj.GetType().GetProperty("Name");
    SetValue(nameInfo, obj, "aqweddad", "string");
    //nameInfo.SetValue(obj, "huhiuhiuhi");
    PropertyInfo isInfo = obj.GetType().GetProperty("IsA");
    SetValue(isInfo, obj, "true", "bool");
    //isInfo.SetValue(obj, System.Convert.ToBoolean("false"));
    PropertyInfo heighInfo = obj.GetType().GetProperty("Heigh");
    SetValue(heighInfo, obj, "51.4", "float");
    //heighInfo.SetValue(obj, System.Convert.ToSingle("22.5"));
    PropertyInfo enumInfo = obj.GetType().GetProperty("TestType");
    SetValue(enumInfo, obj, "VAR1", "enum");
    //object infoValue = TypeDescriptor.GetConverter(enumInfo.PropertyType).ConvertFromInvariantString("VAR1");
    //enumInfo.SetValue(obj, infoValue);

    // Build a List<string> through reflection and append items by invoking the
    // list's Add method via InvokeMember.
    Type type = typeof(string);
    object list = CreateList(type);
    for (int i = 0; i < 3; i++)
    {
        object addItem = "测试填数据" + i;
        list.GetType().InvokeMember("Add", BindingFlags.Default | BindingFlags.InvokeMethod, null, list, new object[] { addItem }); // invoke list.Add via reflection to append the item
    }
    // obj.GetType().GetProperty("AllStrList").SetValue(obj, list);
    // obj.GetType().GetProperty("AllStrList").SetValue(obj,list, )
    // public virtual object GetValue(object obj, object[] index);
    // public abstract object GetValue(object obj, BindingFlags invokeAttr, Binder binder, object[] index, CultureInfo culture);
    // public abstract void SetValue(object obj, object value, BindingFlags invokeAttr, Binder binder, object[] index, CultureInfo culture);
    // [DebuggerHidden]
    // [DebuggerStepThrough]
    // public virtual void SetValue(object obj, object value, object[] index);

    // Build a List<TestInfoTwo>, creating and populating each element
    // reflectively before adding it to the list.
    object twoList = CreateList(typeof(TestInfoTwo));
    for (int i = 0; i < 3; i++)
    {
        object addItem = CreateClass("TestInfoTwo");
        PropertyInfo itemIdInfo = addItem.GetType().GetProperty("Id");
        SetValue(itemIdInfo, addItem, "152" + i, "int");
        PropertyInfo itemNameInfo = addItem.GetType().GetProperty("Name");
        SetValue(itemNameInfo, addItem, "测试类" + i, "string");
        twoList.GetType().InvokeMember("Add", BindingFlags.Default | BindingFlags.InvokeMethod, null, twoList, new object[] { addItem });
    }
    obj.GetType().GetProperty("AllTestInfoList").SetValue(obj, twoList);

    // Verify the reflective population by reading the object back as TestInfo.
    TestInfo testInfo = (obj as TestInfo);
    //foreach (string str in testInfo.AllStrList)
    //{
    //    Debug.LogError(str);
    //}
    foreach (TestInfoTwo test in testInfo.AllTestInfoList)
    {
        Debug.LogError(test.Id + " " + test.Name);
    }
}
// Populates the left/right text panes and the compare/update-baseline button
// states when a node in the report tree is about to be selected.
// Left pane = expected/baseline text; right pane = actual/missing text.
void reportTreeView_BeforeSelect(object sender, TreeViewCancelEventArgs e)
{
    TestInfo testInfo = new TestInfo();
    FailStateNode node = (FailStateNode)e.Node;
    TreeNode parentNode = node.Parent;
    string leftText = null;
    string rightText = null;
    bool informationalText = false;
    if (parentNode != null)
    {
        TestCaseNode parentTestCase = parentNode as TestCaseNode;
        // NOTE(review): compareNode is dereferenced below without a null check —
        // presumably children of a TestCaseNode are always CompareNodes; confirm.
        CompareNode compareNode = node as CompareNode;
        if (parentTestCase != null)
        {
            // Selected node is a compare node beneath a test case.
            testInfo.Initialize(myReportSuites, parentTestCase);
            string result = testInfo.Result;
            ReportAssembly reportAssembly = GetReportAssembly(testInfo);
            string nodeText = node.Text;
            if (result == MissingReportBaseline)
            {
                if (nodeText == ReportFileName)
                {
                    // Whole report is missing: synthesize a fake-pass version on the right.
                    rightText = FindMissingReport(testInfo, reportAssembly, true);
                }
                else if (compareNode.FailState != NodeFailState.Passed)
                {
                    FailingCompareNode failingCompare = (FailingCompareNode)compareNode;
                    string missingReport = FindMissingReport(testInfo, reportAssembly, false);
                    if (failingCompare.MissingBaseline)
                    {
                        // The full text is in the report file
                        using (XmlReader reportReader = XmlReader.Create(new StringReader(missingReport)))
                        {
                            rightText = FindMissingCompare(reportReader, nodeText);
                        }
                    }
                    else
                    {
                        // The full text is available by patching with data in the assembly
                        leftText = GetExpectedText(testInfo, reportAssembly, nodeText);
                        rightText = GetActualCompare(testInfo, reportAssembly, missingReport, nodeText);
                    }
                }
                else
                {
                    // Passed compare: show the expected text for information only.
                    informationalText = true;
                    leftText = GetExpectedText(testInfo, reportAssembly, nodeText);
                }
            }
            else if (result == ReportDiffgram)
            {
                if (node.FailState != NodeFailState.Passed)
                {
                    FailingCompareNode failingCompare = (FailingCompareNode)node;
                    if (failingCompare.MissingBaseline)
                    {
                        rightText = FindMissingCompare(testInfo, reportAssembly.Assembly, nodeText);
                    }
                    else
                    {
                        leftText = GetExpectedText(testInfo, reportAssembly, nodeText);
                        // For the report file itself, show a fake-pass rendering;
                        // otherwise show the actual compare text.
                        string actualText = nodeText == ReportFileName ? GetFakePassReport(testInfo, reportAssembly) : GetActualCompare(testInfo, reportAssembly, null, nodeText);
                        if (!string.IsNullOrEmpty(actualText))
                        {
                            rightText = actualText;
                        }
                    }
                }
                else
                {
                    informationalText = true;
                    leftText = GetExpectedText(testInfo, reportAssembly, nodeText);
                }
            }
        }
        else if (null != (parentTestCase = node as TestCaseNode))
        {
            // Selected node is itself a test case: expected report only, informational.
            informationalText = true;
            testInfo.Initialize(myReportSuites, parentTestCase);
            ReportAssembly reportAssembly = GetReportAssembly(testInfo);
            //get the expected report
            if (testInfo.Result == TestPassed)
            {
                leftText = GetExpectedText(testInfo, reportAssembly, ReportFileName);
            }
            else
            {
                leftText = ResourceStrings.FailureDetailsAvailableText;
            }
        }
    }
    tbLeft.Text = leftText;
    tbRight.Text = rightText;
    // Compare/update are only enabled when the text is actionable (not informational).
    if (leftText != null && !informationalText)
    {
        btnCompare.Enabled = rightText != null;
        btnUpdateBaseline.Enabled = rightText != null;
    }
    else if (rightText != null && !informationalText)
    {
        btnCompare.Enabled = false;
        btnUpdateBaseline.Enabled = true;
    }
    else
    {
        btnCompare.Enabled = false;
        btnUpdateBaseline.Enabled = false;
    }
    // Assembly information is located on level 1
    associatedSolutionToolStripMenuItem.Enabled = node.Level != 0 || node.Nodes.Count == 1;
}
// Integration test: loads a serialized shader graph named by testInfo, compares
// its generated shader text against a stored .shader template, renders a preview
// and compares the capture against a stored .png template using an RMSE
// threshold (testInfo.threshold). Missing template files are created and the
// test fails, so the next run has a baseline; mismatches are written to a
// "Failed" folder for inspection.
public void ShaderGeneratorOutput(TestInfo testInfo)
{
    var file = testInfo.info;
    var filePath = Path.Combine(s_Path, file.Name);
    var textGraph = File.ReadAllText(filePath, Encoding.UTF8);
    var graph = JsonUtility.FromJson <ShaderGraph.MaterialGraph>(textGraph);
    Assert.IsNotNull(graph.masterNode, "No master node in graph.");
    //
    //Assert.IsNotNull(graphAsset, "Graph asset not found");
    //var materialGraph = graphAsset.graph as UnityEngine.MaterialGraph.MaterialGraph;
    //Assert.IsNotNull(materialGraph);

    // Generate the shader
    // NOTE(review): the generation call below is commented out, so shaderString
    // stays empty and configuredTextures stays empty — the text comparison can
    // only pass against an empty template. Confirm whether this is intentional.
    List <PropertyCollector.TextureInfo> configuredTextures = new List <PropertyCollector.TextureInfo>();
    var shaderString = String.Empty;
    //graph.masterNode.GetFullShader(GenerationMode.ForReals, Path.GetFileNameWithoutExtension(filePath), out configuredTextures);
    var rootPath = Path.Combine(Path.Combine(DefaultShaderIncludes.GetRepositoryPath(), "Testing"), "IntegrationTests");
    var shaderTemplatePath = Path.Combine(rootPath, "ShaderTemplates");
    Directory.CreateDirectory(shaderTemplatePath);
    var textTemplateFilePath = Path.Combine(shaderTemplatePath, string.Format("{0}.{1}", file.Name, "shader"));
    if (!File.Exists(textTemplateFilePath))
    {
        // No text baseline yet: create it and fail so the run is flagged.
        File.WriteAllText(textTemplateFilePath, shaderString);
        Assert.Fail("Text template file not found for {0}, creating it.", file);
    }
    else
    {
        var textTemplate = File.ReadAllText(textTemplateFilePath);
        // Symbol-insensitive comparison of generated shader text vs. template.
        var textsAreEqual = string.Compare(shaderString, textTemplate, CultureInfo.CurrentCulture, CompareOptions.IgnoreSymbols);
        if (0 != textsAreEqual)
        {
            // Dump both sides to the Failed folder for diffing.
            var failedPath = Path.Combine(rootPath, "Failed");
            Directory.CreateDirectory(failedPath);
            var misMatchLocationResult = Path.Combine(failedPath, string.Format("{0}.{1}", file.Name, "shader"));
            var misMatchLocationTemplate = Path.Combine(failedPath, string.Format("{0}.template.{1}", file.Name, "shader"));
            File.WriteAllText(misMatchLocationResult, shaderString);
            File.WriteAllText(misMatchLocationTemplate, textTemplate);
            Assert.Fail("Shader text from graph {0}, did not match .template file.", file);
        }
    }

    m_Shader = ShaderUtil.CreateShaderAsset(shaderString);
    m_Shader.hideFlags = HideFlags.HideAndDontSave;
    Assert.IsNotNull(m_Shader, "Shader Generation Failed");
    //Assert.IsFalse(AbstractMaterialNodeUI.ShaderHasError(m_Shader), "Shader has error");
    m_PreviewMaterial = new Material(m_Shader)
    {
        hideFlags = HideFlags.HideAndDontSave
    };
    // Bind any textures the generator configured onto the preview material.
    foreach (var textureInfo in configuredTextures)
    {
        var texture = EditorUtility.InstanceIDToObject(textureInfo.textureId) as Texture;
        if (texture == null)
        {
            continue;
        }
        m_PreviewMaterial.SetTexture(textureInfo.name, texture);
    }
    Assert.IsNotNull(m_PreviewMaterial, "preview material could not be created");
    const int res = 256;
    using (var generator = new MaterialGraphPreviewGenerator())
    {
        // Render a 3D preview of the material and read the pixels back.
        var renderTexture = new RenderTexture(res, res, 16, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default)
        {
            hideFlags = HideFlags.HideAndDontSave
        };
        generator.DoRenderPreview(renderTexture, m_PreviewMaterial, null, PreviewMode.Preview3D, true, 10);
        Assert.IsNotNull(renderTexture, "Render failed");
        RenderTexture.active = renderTexture;
        m_Captured = new Texture2D(renderTexture.width, renderTexture.height, TextureFormat.ARGB32, false);
        m_Captured.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        RenderTexture.active = null; //can help avoid errors
        Object.DestroyImmediate(renderTexture, true);

        // find the reference image
        var dumpFileLocation = Path.Combine(shaderTemplatePath, string.Format("{0}.{1}", file.Name, "png"));
        if (!File.Exists(dumpFileLocation))
        {
            // no reference exists, create it
            var generated = m_Captured.EncodeToPNG();
            File.WriteAllBytes(dumpFileLocation, generated);
            Assert.Fail("Image template file not found for {0}, creating it.", file);
        }

        var template = File.ReadAllBytes(dumpFileLocation);
        m_FromDisk = new Texture2D(2, 2);
        m_FromDisk.LoadImage(template, false);

        // Root-mean-square error between captured preview and reference image.
        var rmse = CompareTextures(m_FromDisk, m_Captured);
        if (rmse > testInfo.threshold)
        {
            // Dump both images to the Failed folder for inspection.
            var failedPath = Path.Combine(rootPath.ToString(), "Failed");
            Directory.CreateDirectory(failedPath);
            var misMatchLocationResult = Path.Combine(failedPath, string.Format("{0}.{1}", file.Name, "png"));
            var misMatchLocationTemplate = Path.Combine(failedPath, string.Format("{0}.template.{1}", file.Name, "png"));
            var generated = m_Captured.EncodeToPNG();
            File.WriteAllBytes(misMatchLocationResult, generated);
            File.WriteAllBytes(misMatchLocationTemplate, template);
            Assert.Fail("Shader image from graph {0}, did not match .template file.", file);
        }
    }
}
void FillTests(TestInfo ti) { if (ti.Tests == null) return; foreach (TestInfo test in ti.Tests) { if (test.Tests != null) Tests.Add (new NUnitTestSuite (this, test)); else Tests.Add (new NUnitTestCase (this, test)); } oldList = new UnitTest [Tests.Count]; Tests.CopyTo (oldList, 0); }
// Loads a test-data package (data1) plus optional extra key/value pairs (data2)
// into this instance: splits the temperature settings, builds the point-test
// collection per source type / temperature / port / wavelength, loads reference
// data, and fills the exData map used for report field substitution.
// Returns false only when data1 is null.
public bool SetData( TestInfoData data1, Dictionary <string, string> data2)
{
    if (data1 == null)
    {
        ErrorString = "测试数据为空"; // runtime message: "test data is empty"
        return(false);
    }
    // Comma-separated temperature settings, indexed below as room/low/high.
    tempSetting = data1.TestInfo.TempSetting.Split(",");
    if (data2 != null)
    {
        // Merge caller-provided extra data first; well-known keys are set below.
        foreach (var item in data2)
        {
            exData.Add(item.Key, item.Value);
        }
    }
    testInfo = data1.TestInfo;
    classID = data1.ProductInfo.ProductClassID;
    var ports = BaseSettingsManager.Get <SystemGroupSetting>().GetPorts(classID);

    // Keep only source types known to the test-type settings.
    var types = new List <string>();
    foreach (var source in data1.TestSources)
    {
        var type = source.SourceType;
        if (!BaseSettingsManager.Get <TestTypeSetting>().ContainsType(type))
        {
            continue;
        }
        types.Add(type);
    }

    collection = new TestDataCollection(types.ToArray(), tempSetting, ports);
    // Populate the collection: type -> temperature -> port -> (wavelength -> item).
    foreach (var source in data1.TestSources)
    {
        var type = source.SourceType;
        if (!types.Contains(type))
        {
            continue;
        }
        foreach (var tempData in source.SourceData)
        {
            var temp = tempData.Key;
            foreach (var portData in tempData.Value)
            {
                var port = portData.Key;
                var list = new SortedList <double, PointTestItem>();
                foreach (var wlData in portData.Value)
                {
                    var wl = wlData.Key;
                    var item = wlData.Value;
                    list.Add(wl, new PointTestItem(item));
                }
                collection.Add(type, temp, port, list);
            }
        }
    }

    // Load reference data, indexed by position then port.
    referenceData = new ReferenceData(Guid.Empty, false);
    for (int i = 0; i < data1.ReferenceData.Count; i++)
    {
        foreach (var portTemp in data1.ReferenceData[i])
        {
            var port = portTemp.Key;
            foreach (var item in portTemp.Value)
            {
                var d = new PointTestItem(item.Value);
                referenceData.AddReference(i, port, d);
            }
        }
    }

    // Well-known substitution keys for report generation.
    exData["sn"] = data1.TestInfo.SN;
    exData["csn"] = data1.CustomerSN;
    exData["workorder"] = data1.WorkInfo.WorkOrderID;
    exData["id"] = data1.WorkInfo.WorkOrderID;
    exData["workorderid"] = data1.WorkInfo.WorkOrderID;
    exData["productcode"] = data1.ProductInfo.ProductCode;
    exData["productname"] = data1.ProductInfo.ProductName;
    exData["stationname"] = data1.StationInfo.StationName;
    exData["operator"] = data1.WorkInfo.WorkInfoOperator;
    // Temperature slots: [0]=room, [1]=low, [2]=high (when present).
    exData["room"] = tempSetting.Length > 0 ? tempSetting[0] : "";
    exData["low"] = tempSetting.Length > 1 ? tempSetting[1] : "";
    exData["high"] = tempSetting.Length > 2 ? tempSetting[2] : "";
    exData["productremark"] = data1.ProductInfo.ProductRemark;
    // Also expose each remark line individually as productremark[i].
    var remarks = data1.ProductInfo.ProductRemark.Split("\r\n");
    for (int i = 0; i < remarks.Length; i++)
    {
        exData[$"productremark[{i}]"] = remarks[i];
    }
    return(true);
}
// Writes the right-pane text back as the new baseline for the selected failing
// compare node. Uses Visual Studio automation (DTE) to find/open the owning
// solution and project, locate or create the baseline file at the namespace-
// derived path, update it (in the open editor when possible), and add it to the
// project if it is not already a project item.
private void btnUpdateBaseline_Click(object sender, EventArgs e)
{
    FailingCompareNode currentNode = reportTreeView.SelectedNode as FailingCompareNode;
    if (currentNode == null)
    {
        MessageBox.Show("Please make sure the correct item in the tree view to the left is selected.");
        return;
    }
    // The node text doubles as the baseline file extension.
    string extension = currentNode.Text;
    TestInfo testInfo = new TestInfo();
    testInfo.Initialize(myReportSuites, (TestCaseNode)currentNode.Parent);
    string newText = tbRight.Text;
    if (newText.Length < 2)
    {
        MessageBox.Show("Textbox cannot be empty.");
        return;
    }
    string assemblyLocation = myBaseDirectory + testInfo.AssemblyLocation;
    string solution = LookupSolution(assemblyLocation);
    Project project = null;
    string targetOuputFile = (new FileInfo(assemblyLocation)).Name;
    string rootNamespace = "";
    string projectDirectory = "";
    DTE2 dte = null;
    // Loop until a project whose output file matches the test assembly is found,
    // remapping assembly -> solution as needed.
    while (project == null)
    {
        if (solution.Length == 0 || !File.Exists(solution))
        {
            solution = MapAssemblyToSolution(assemblyLocation, solution, true, false);
            if (solution.Length == 0)
            {
                return; // user gave up / no mapping available
            }
        }
        dte = FindDTEInstance(solution);
        if (dte == null)
        {
            // No running VS instance has this solution: start one and open it.
            dte = Activator.CreateInstance(Type.GetTypeFromProgID("VisualStudio.DTE." + Settings.Default.VisualStudioVersion)) as DTE2;
            dte.MainWindow.Visible = true;
            dte.Solution.Open(solution);
        }
        // Guid.Empty.ToString("B") creates 32 digits separated by hyphens, enclosed in brackets:
        //{00000000-0000-0000-0000-000000000000}
        if (!dte.Solution.IsOpen)
        {
            return;
        }
        // Find the appropriate project in the solution from the assembly path
        foreach (Project testProject in dte.Solution.Projects)
        {
            Properties properties = testProject.Properties;
            if (properties != null)
            {
                try
                {
                    if (0 == string.Compare((string)properties.Item("OutputFileName").Value, targetOuputFile, StringComparison.OrdinalIgnoreCase))
                    {
                        rootNamespace = (string)properties.Item("RootNamespace").Value;
                        projectDirectory = (string)properties.Item("FullPath").Value;
                        project = testProject;
                        break;
                    }
                }
                catch (ArgumentException)
                {
                    // Swallow it — project type without these properties.
                }
            }
        }
        if (project == null)
        {
            // Wrong solution for this assembly: clear the mapping and retry.
            MapAssemblyToSolution(assemblyLocation, "", false, false);
            solution = "";
        }
    }
    // Derive the file's directory from the test namespace, trimming the
    // project's root namespace prefix when present.
    string fileDirectory = testInfo.TestNamespace;
    int rootNamespaceLength = rootNamespace.Length;
    if (rootNamespaceLength != 0)
    {
        int directoryLength = fileDirectory.Length;
        if (directoryLength == rootNamespaceLength)
        {
            fileDirectory = "";
        }
        else if (directoryLength > (rootNamespaceLength + 1) && fileDirectory[rootNamespaceLength] == '.' && fileDirectory.StartsWith(rootNamespace))
        {
            fileDirectory = fileDirectory.Substring(rootNamespaceLength + 1);
        }
    }
    fileDirectory = fileDirectory.Replace('.', '\\');
    string testClassName = testInfo.TestClassName;
    string testName = testInfo.TestName;
    // Baseline file name pattern: Class.Test.extension
    string fileName = string.Format(CultureInfo.InvariantCulture, "{0}.{1}.{2}", testClassName, testName, extension);
    string filePath = (fileDirectory.Length == 0) ? string.Format(CultureInfo.InvariantCulture, @"{0}{1}", projectDirectory, fileName) : string.Format(CultureInfo.InvariantCulture, @"{0}{1}\{2}", projectDirectory, fileDirectory, fileName);
    bool documentUpdated = false;
    Document doc = null;
    // If the file is already open in the editor, replace its contents in place.
    if (dte.ItemOperations.IsFileOpen(filePath, Guid.Empty.ToString("B")))
    {
        doc = dte.Documents.Item(filePath);
        TextDocument textDoc = doc.Object("TextDocument") as TextDocument;
        if (textDoc != null)
        {
            textDoc.StartPoint.CreateEditPoint().ReplaceText(textDoc.EndPoint, newText, 0);
            documentUpdated = true;
        }
    }
    ProjectItem existingProjectItem = ProjectContains(project.ProjectItems, fileName);
    if (existingProjectItem != null)
    {
        // the project does contain the item.
        if (documentUpdated)
        {
            doc.Save("");
        }
        else if (doc != null)
        {
            doc.Close(vsSaveChanges.vsSaveChangesNo);
        }
        else
        {
            File.WriteAllText(filePath, newText, Encoding.UTF8);
        }
        SetFileProperties(existingProjectItem);
    }
    else
    {
        if (doc != null)
        {
            doc.Close(documentUpdated ? vsSaveChanges.vsSaveChangesYes : vsSaveChanges.vsSaveChangesNo);
        }
        //the file is not listed as part of the project or is not in the expected namespace and must be added.
        AddFileToProject(project.ProjectItems, filePath, fileDirectory, newText);
    }
    //clear out the correct tree node, to prevent this method running again against this same node.
    currentNode.IsRepaired = true;
    return;
}
public BaseMeasurementTask() { testInfoRepository = DbFactory.Repository <TestInfoRepository>(false); testInfo = testInfoRepository.GetData(Guid.Empty); }
/// <summary> /// Selects tests which currently is being executed. /// </summary> /// <param name="testInfo">Test information.</param> void HighlightActiveTest(TestInfo testInfo) { tvTestCases.HighlightActiveTest(testInfo, _controller.ScrollingEnabled); }
/// <summary>
/// Enumerates DLLs in the executing assembly's directory and collects tests
/// from assemblies marked with TestAssemblyAttribute. Test classes must derive
/// from BaseTest; test methods are identified by TestAttribute. Tests are keyed
/// by Order — when two tests share an Order, the one with the higher Version
/// wins. Notifies the view and TestsLoaded subscribers when done.
/// </summary>
/// <returns>List of tests loaded.</returns>
public List <TestInfo> LoadTests()
{
    _testInfos = new List <TestInfo>();
    string location = Assembly.GetExecutingAssembly().Location;
    string path = Path.GetDirectoryName(location);
    foreach (string file in Directory.GetFiles(path, "*.dll"))
    {
        try
        {
            System.Reflection.Assembly assembly = Assembly.LoadFile(file);
            if (assembly.GetCustomAttributes(typeof(TestAssemblyAttribute), false).Length == 0)
            {
                continue; // not a test assembly
            }
            foreach (Type t in assembly.GetTypes())
            {
                // Only [TestClass] types deriving from BaseTest can contain tests.
                if (t.GetCustomAttributes(typeof(TestClassAttribute), true).Length == 0 ||
                    !t.IsSubclassOf(typeof(Tests.Common.TestBase.BaseTest)))
                {
                    continue;
                }
                foreach (MethodInfo mi in t.GetMethods())
                {
                    object[] testAttributes = mi.GetCustomAttributes(typeof(TestAttribute), true);
                    if (testAttributes.Length == 0)
                    {
                        continue;
                    }
                    TestAttribute attribute = (TestAttribute)testAttributes[0];
                    // Order collision: keep the higher-versioned test.
                    TestInfo existing = _testInfos.FirstOrDefault(ti => ti.Order == attribute.Order);
                    if (existing != null)
                    {
                        System.Diagnostics.Debug.WriteLine(string.Format("One more test with order {0} found {1}", attribute.Order, attribute.Name));
                        if (existing.Version > attribute.Version)
                        {
                            System.Diagnostics.Debug.WriteLine("Leave test already loaded");
                            continue;
                        }
                        System.Diagnostics.Debug.WriteLine("Reload newer test");
                        _testInfos.Remove(existing);
                    }
                    TestInfo testInfo = new TestInfo();
                    testInfo.Method = mi;
                    testInfo.Name = attribute.Name;
                    testInfo.Group = attribute.Path;
                    testInfo.Order = attribute.Order;
                    testInfo.Version = attribute.Version;
                    testInfo.Interactive = attribute.Interactive;
                    testInfo.RequirementLevel = attribute.RequirementLevel;
                    testInfo.RequiredFeatures.AddRange(attribute.RequiredFeatures);
                    testInfo.Services.AddRange(attribute.Services);
                    _testInfos.Add(testInfo);
                }
            }
        }
        catch (Exception exc)
        {
            // FIX: was an empty catch that silently hid load/reflection failures.
            // Still best-effort — one bad DLL must not abort the scan — but log it.
            System.Diagnostics.Debug.WriteLine(string.Format("Failed to load tests from {0}: {1}", file, exc));
        }
    }
    View.DisplayTests(_testInfos);
    if (TestsLoaded != null)
    {
        TestsLoaded(_testInfos);
    }
    return(_testInfos);
}
void reportTreeView_BeforeExpand(object sender, TreeViewCancelEventArgs e) { TestCaseNode parentNode = e.Node as TestCaseNode; if (parentNode != null) { TestInfo testInfo = new TestInfo(); testInfo.Initialize(myReportSuites, parentNode); //clear out the dummy subnode, if present... TreeNodeCollection nodes = parentNode.Nodes; if (nodes.Count > 0) { FailStateNode firstNode = (FailStateNode)parentNode.FirstNode; if (firstNode is DummyTreeNode) { nodes.Clear(); foreach (FailStateNode childNode in GetTestCompareNodes(testInfo)) { nodes.Add(childNode); parentNode.NotifyChildAttached(childNode); } parentNode.NotifyChildDetached(firstNode); } } } }
/// <summary>
/// Runs the provided test on all the environments.
/// Launches the ParallelRunner executable with a generated JSON config and
/// translates its exit code into a <see cref="TestRunResults"/>.
/// </summary>
/// <param name="testInfo"> The test information. </param>
/// <param name="errorReason"> failure reason </param>
/// <param name="runCancelled"> delegate to RunCancelled </param>
/// <returns>
/// The run results for the current test.
/// </returns>
public TestRunResults RunTest(TestInfo testInfo, ref string errorReason, RunCancelledDelegate runCancelled)
{
    // change the DCOM setting for qtp application
    Helper.ChangeDCOMSettingToInteractiveUser();

    testInfo.ReportPath = testInfo.TestPath + @"\ParallelReport";

    // this is to make sure that we do not overwrite the report
    // when we run the same test multiple times on the same build;
    // resFolder is assigned to ReportLocation only after the run completes.
    string resFolder = Helper.GetNextResFolder(testInfo.ReportPath);

    var runResults = new TestRunResults
    {
        ReportLocation = testInfo.ReportPath,
        ErrorDesc = errorReason,
        TestState = TestState.Unknown,
        TestPath = testInfo.TestPath,
        TestType = TestType.ParallelRunner.ToString()
    };

    // set the active test run
    ConsoleWriter.ActiveTestRun = runResults;

    if (!_canRun)
    {
        // ParallelRunner executable was not located during construction.
        ConsoleWriter.WriteLine("Could not find parallel runner executable!");
        errorReason = Resources.ParallelRunnerExecutableNotFound;
        runResults.TestState = TestState.Error;
        runResults.ErrorDesc = errorReason;
        return(runResults);
    }

    // Try to create the ParalleReport path
    try
    {
        Directory.CreateDirectory(runResults.ReportLocation);
    }
    catch (Exception)
    {
        errorReason = string.Format(Resources.FailedToCreateTempDirError, runResults.ReportLocation);
        runResults.TestState = TestState.Error;
        runResults.ErrorDesc = errorReason;
        // A missing report directory fails the whole launcher run, not just this test.
        Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed;
        return(runResults);
    }

    ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " => Using ParallelRunner to execute test: " + testInfo.TestPath);

    _runCancelled = runCancelled;

    // prepare the json file for the process
    var configFilePath = string.Empty;

    try
    {
        configFilePath = ParallelRunnerEnvironmentUtil.GetConfigFilePath(testInfo,
            _mcConnectionInfo, _environments);
        // remember the config file so it can be cleaned up later
        _configFiles.Add(configFilePath);
    }
    catch (ParallelRunnerConfigurationException ex) // invalid configuration
    {
        errorReason = ex.Message;
        runResults.ErrorDesc = errorReason;
        runResults.TestState = TestState.Error;
        return(runResults);
    }

    // Parallel runner argument "-c" for config path and "-o static" so that
    // the output from ParallelRunner is compatible with Jenkins
    var arguments = String.Format(ParallelRunnerArguments, configFilePath);

    // the test can be started now
    runResults.TestState = TestState.Running;

    var runTime = new Stopwatch();
    runTime.Start();

    string failureReason = null;
    runResults.ErrorDesc = null;

    // execute parallel runner and get the run result status
    int exitCode = ExecuteProcess(_parallelRunnerPath, arguments, ref failureReason);

    // set the status of the build based on the exit code
    RunResultsFromParallelRunnerExitCode(runResults, exitCode, failureReason, ref errorReason);

    // update the run time
    runResults.Runtime = runTime.Elapsed;

    // update the report location as the report should be
    // generated by now
    runResults.ReportLocation = resFolder;

    return(runResults);
}
/// <summary> /// Parse all of the provided environment strings for this test. /// </summary> /// <param name="environments">the environments list</param> /// <param name="testInfo">the test information </param> /// <returns>the parallel test run configuration</returns> public static ParallelTestRunConfiguration ParseEnvironmentStrings(IEnumerable <string> environments, TestInfo testInfo) { var parallelTestRunConfiguration = new ParallelTestRunConfiguration { reportPath = testInfo.ReportPath }; var items = new List <ParallelTestRunConfigurationItem>(); foreach (var env in environments) { var environment = new ParallelTestRunConfiguraion.Environment(); // try to determine the environment type var type = GetEnvironmentType(env); if (type == EnvironmentType.MOBILE) { environment.mobile = ParseMobileEnvironment(env); if (environment.mobile == null) { throw new ParallelRunnerConfigurationException("Invalid mobile configuration provided: " + env); } } else if (type == EnvironmentType.WEB) { environment.web = ParseWebEnvironment(env); if (environment.web == null) { throw new ParallelRunnerConfigurationException("Invalid web configuration provided: " + env); } } else { // environment might be an empty string, just ignore it continue; } var item = new ParallelTestRunConfigurationItem { test = testInfo.TestPath, env = environment, reportPath = testInfo.TestPath }; items.Add(item); } parallelTestRunConfiguration.parallelRuns = items.ToArray(); return(parallelTestRunConfiguration); }
public static string GetIdentifiedTest(FileInfo testFile, TestInfo info, LanguageSupport support) { string relativePath; return(GetIdentifiedTest(testFile, info, support, out relativePath)); }
/// <summary>
/// Loads tests from the type specified: scans the type's own methods for
/// TestAttribute, handles version-based replacement of duplicate id/category
/// tests, and registers any settings types the tests require.
/// </summary>
/// <param name="t">Candidate test class to scan.</param>
void LoadTests(Type t)
{
    // Only classes implementing ITest are test classes.
    if (!t.GetInterfaces().Contains(typeof(ITest)))
    {
        return;
    }

    foreach (MethodInfo mi in t.GetMethods())
    {
        // Skip methods inherited from a base type; the declaring type is
        // scanned separately, so processing them here would duplicate tests.
        if (mi.DeclaringType != t)
        {
            continue;
        }

        object[] testAttributes = mi.GetCustomAttributes(typeof(TestAttribute), true);
        if (testAttributes.Length == 0)
        {
            continue;
        }

        // get test information.
        TestAttribute attribute = (TestAttribute)testAttributes[0];

        // A test with the same id/category may already be loaded;
        // keep whichever one declares the higher version.
        TestInfo existing = _testInfos.FirstOrDefault(ti => ti.Id == attribute.Id && ti.Category == attribute.Category);
        if (existing != null)
        {
            System.Diagnostics.Debug.WriteLine(string.Format("--- One more test with order {0} found [{1} in {2} and {3} in {4} ]", attribute.Order, attribute.Name, t.Name, existing.Name, existing.Method.ReflectedType.Name));
            if (existing.Version > attribute.Version)
            {
                System.Diagnostics.Debug.WriteLine("Leave test already loaded");
                continue;
            }
            System.Diagnostics.Debug.WriteLine("Reload newer test");
            _testInfos.Remove(existing);
        }

        // Materialize the attribute data into a TestInfo entry.
        TestInfo testInfo = new TestInfo();
        testInfo.Method = mi;
        testInfo.Name = attribute.Name;
        testInfo.Group = attribute.Path;
        testInfo.Order = attribute.Order;
        testInfo.ExecutionOrder = attribute.ExecutionOrder;
        testInfo.Id = attribute.Id;
        testInfo.Category = attribute.Category;
        testInfo.Version = attribute.Version;
        testInfo.RequirementLevel = attribute.RequirementLevel;
        testInfo.RequiredFeatures.AddRange(attribute.RequiredFeatures);
        testInfo.FunctionalityUnderTest.AddRange(attribute.FunctionalityUnderTest);
        _testInfos.Add(testInfo);

        // Register any additional settings types this test requires.
        if (attribute.ParametersTypes != null)
        {
            foreach (Type type in attribute.ParametersTypes)
            {
                if (!_settingsTypes.Contains(type))
                {
                    _settingsTypes.Add(type);
                }
            }
        }
    }
}
private void CheckEvent( TestAction action, string fileName, TestInfo test ) { CheckEvent( action, fileName ); Assert.AreEqual( TESTNAME, ((TestEventArgs)catcher.Events[0]).Test.TestName.Name ); }
/// <summary> /// Multiply TestInfo by VariationAttributes found on type's constructors. /// Create TestInfo from a TestAttribute, Type and default TestInfo. /// New test info will be added to tests List. /// </summary> protected override ICollection <TestInfo> BuildTestInfo(TestAttribute testAttribute, Type ownerType, TestInfo defaultTestInfo) { List <TestInfo> tests = new List <TestInfo>(); TestInfo baseTestInfo = base.BuildTestInfo(testAttribute, ownerType, defaultTestInfo).First(); // Search each constructor for variationAttributes foreach (ConstructorInfo constructorInfo in ownerType.GetConstructors()) { IEnumerable <VariationAttribute> variationAttributes = constructorInfo.GetCustomAttributes(typeof(VariationAttribute), false).Cast <VariationAttribute>(); tests.AddRange(Multiply(baseTestInfo, variationAttributes, ownerType)); } // No variationAttributes found for the base test. Add it without variation information. if (tests.Count == 0) { tests.Add(baseTestInfo); } return(tests); }
/// <summary>
/// Uploads 10 files of 5MB each asynchronously, downloads them back, and
/// verifies that transferred byte counts and MD5 hashes match.
/// </summary>
public void Test_Sftp_Multiple_Async_Upload_And_Download_10Files_5MB_Each()
{
    var maxFiles = 10;
    var maxSize = 5;

    using (var sftp = new SftpClient(Resources.HOST, Resources.USERNAME, Resources.PASSWORD))
    {
        sftp.Connect();

        // Prepare local test files, keyed by their remote name, and record
        // each file's hash for later comparison.
        var testInfoList = new Dictionary<string, TestInfo>();
        for (int i = 0; i < maxFiles; i++)
        {
            var testInfo = new TestInfo();
            testInfo.UploadedFileName = Path.GetTempFileName();
            testInfo.DownloadedFileName = Path.GetTempFileName();
            testInfo.RemoteFileName = Path.GetRandomFileName();

            this.CreateTestFile(testInfo.UploadedFileName, maxSize);

            // Calculate hash value
            testInfo.UploadedHash = CalculateMD5(testInfo.UploadedFileName);

            testInfoList.Add(testInfo.RemoteFileName, testInfo);
        }

        // Start file uploads
        var uploadWaitHandles = new List<WaitHandle>();
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            testInfo.UploadedFile = File.OpenRead(testInfo.UploadedFileName);
            testInfo.UploadResult = sftp.BeginUploadFile(testInfo.UploadedFile, remoteFile, null, null) as SftpUploadAsyncResult;
            uploadWaitHandles.Add(testInfo.UploadResult.AsyncWaitHandle);
        }

        // Wait on the async wait handles directly instead of busy-polling
        // IsCompleted with Thread.Sleep (the original collected the handles
        // but never used them, and always slept 500ms per poll even after
        // completion). Per-handle WaitOne avoids the 64-handle/STA limits
        // of WaitHandle.WaitAll.
        foreach (var handle in uploadWaitHandles)
        {
            handle.WaitOne();
        }

        // End file uploads
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            sftp.EndUploadFile(testInfo.UploadResult);
            testInfo.UploadedFile.Dispose();
        }

        // Start file downloads
        var downloadWaitHandles = new List<WaitHandle>();
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            testInfo.DownloadedFile = File.OpenWrite(testInfo.DownloadedFileName);
            testInfo.DownloadResult = sftp.BeginDownloadFile(remoteFile, testInfo.DownloadedFile, null, null) as SftpDownloadAsyncResult;
            downloadWaitHandles.Add(testInfo.DownloadResult.AsyncWaitHandle);
        }

        // Wait for all downloads to finish.
        foreach (var handle in downloadWaitHandles)
        {
            handle.WaitOne();
        }

        var hashMatches = true;
        var uploadDownloadSizeOk = true;

        // End file downloads and verify each transfer.
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            sftp.EndDownloadFile(testInfo.DownloadResult);
            testInfo.DownloadedFile.Dispose();

            testInfo.DownloadedHash = CalculateMD5(testInfo.DownloadedFileName);

            // Both directions must have moved a nonzero, identical byte count.
            if (!(testInfo.UploadResult.UploadedBytes > 0 && testInfo.DownloadResult.DownloadedBytes > 0 && testInfo.DownloadResult.DownloadedBytes == testInfo.UploadResult.UploadedBytes))
            {
                uploadDownloadSizeOk = false;
            }

            if (!testInfo.DownloadedHash.Equals(testInfo.UploadedHash))
            {
                hashMatches = false;
            }
        }

        // Clean up after test
        foreach (var remoteFile in testInfoList.Keys)
        {
            var testInfo = testInfoList[remoteFile];
            sftp.DeleteFile(remoteFile);
            File.Delete(testInfo.UploadedFileName);
            File.Delete(testInfo.DownloadedFileName);
        }

        sftp.Disconnect();

        Assert.IsTrue(hashMatches, "Hash does not match");
        Assert.IsTrue(uploadDownloadSizeOk, "Uploaded and downloaded bytes does not match");
    }
}
/// <summary>
/// Graphics regression test: opens each collected scene, renders it with the
/// configured camera, and compares the capture against a stored template PNG.
/// On first run (no template) the capture is saved as the new template and the
/// test fails so the image can be reviewed. On mismatch, both images are
/// written to a "Failed" folder for inspection.
/// </summary>
/// <param name="testInfo">Scene path, wait frames, and comparison threshold.</param>
public IEnumerator TestScene([ValueSource(typeof(CollectScenes), "scenes")] TestInfo testInfo)
{
    var prjRelativeGraphsPath = s_Path.Aggregate("Assets", Path.Combine);
    var filePath = Path.Combine(prjRelativeGraphsPath, testInfo.relativePath);

    // open the scene
    EditorSceneManager.OpenScene(filePath);

    var testSetup = Object.FindObjectOfType <SetupSceneForRenderPipelineTest> ();
    Assert.IsNotNull(testSetup, "No SetupSceneForRenderPipelineTest in scene " + testInfo.name);
    Assert.IsNotNull(testSetup.cameraToUse, "No configured camera in <SetupSceneForRenderPipelineTest>");

    testSetup.Setup();

    // Let the scene settle for the configured number of frames.
    for (int i = 0; i < testInfo.frameWait; ++i)
    {
        yield return(null);
    }

    // Wait for any in-progress lightmap bake before capturing.
    while (Lightmapping.isRunning)
    {
        yield return(null);
    }

    var rtDesc = new RenderTextureDescriptor(
        testSetup.width, testSetup.height,
        testSetup.hdr ? RenderTextureFormat.ARGBHalf : RenderTextureFormat.ARGB32,
        24);
    rtDesc.sRGB = PlayerSettings.colorSpace == ColorSpace.Linear;

    // render the scene into a temporary target, restoring the camera's
    // original target afterwards
    var tempTarget = RenderTexture.GetTemporary(rtDesc);
    var oldTarget = testSetup.cameraToUse.targetTexture;
    testSetup.cameraToUse.targetTexture = tempTarget;
    testSetup.cameraToUse.Render();
    testSetup.cameraToUse.targetTexture = oldTarget;

    // Readback the rendered texture (RenderTexture.active is saved/restored)
    var oldActive = RenderTexture.active;
    RenderTexture.active = tempTarget;
    var captured = new Texture2D(tempTarget.width, tempTarget.height, TextureFormat.ARGB32, false);
    captured.ReadPixels(new Rect(0, 0, testSetup.width, testSetup.height), 0, 0);
    RenderTexture.active = oldActive;

    var rootPath = Directory.GetParent(Application.dataPath).ToString();
    var templatePath = Path.Combine(rootPath.ToString(), "ImageTemplates");

    // find the reference image
    var dumpFileLocation = Path.Combine(templatePath, string.Format("{0}.{1}", testInfo.relativePath, "png"));
    if (!File.Exists(dumpFileLocation))
    {
        // no reference exists, create it and fail so it gets reviewed
        var fileInfo = new FileInfo(dumpFileLocation);
        fileInfo.Directory.Create();

        var generated = captured.EncodeToPNG();
        File.WriteAllBytes(dumpFileLocation, generated);
        Assert.Fail("Template file not found for {0}, creating it at {1}.", testInfo.name, dumpFileLocation);
    }

    var template = File.ReadAllBytes(dumpFileLocation);
    var fromDisk = new Texture2D(2, 2);
    fromDisk.LoadImage(template, false);

    var areEqual = CompareTextures(fromDisk, captured, testInfo.threshold);
    if (!areEqual)
    {
        // Dump both the capture and the template for offline comparison.
        var failedPath = Path.Combine(rootPath.ToString(), "Failed");
        Directory.CreateDirectory(failedPath);
        var misMatchLocationResult = Path.Combine(failedPath, string.Format("{0}.{1}", testInfo.name, "png"));
        var misMatchLocationTemplate = Path.Combine(failedPath, string.Format("{0}.template.{1}", testInfo.name, "png"));
        var generated = captured.EncodeToPNG();
        File.WriteAllBytes(misMatchLocationResult, generated);
        File.WriteAllBytes(misMatchLocationTemplate, template);
    }

    Assert.IsTrue(areEqual, "Scene from {0}, did not match .template file.", testInfo.relativePath);
}
public void SetInfo(string path, TestInfo info) { if (File.Exists (path)) { CachedTestInfo cti = new CachedTestInfo (); cti.LastWriteTime = File.GetLastWriteTime (path); cti.Info = info; table [path] = cti; modified = true; } }
public TestInfo[] GetTestList(TestInfo filter) { Check(PermissionHelper.SelectPermission(EIDSSPermissionObject.Test)); return(TestInfo.GetAll(filter)); }