/// <summary>
/// Writes a timestamped trace line to the instrumentation message queue. The instrumentation
/// log is primarily used for a deeper understanding of what happened during the stress run.
/// </summary>
/// <param name="curTestSet">test set whose logging level filters the message; null means always log</param>
/// <param name="level">logging level of this message</param>
/// <param name="str">message text to record</param>
public void WriteToInstrumentationLog(ReliabilityTestSet curTestSet, LoggingLevels level, string str)
{
    // Drop the message when the current test set's logging level filters it out.
    if (curTestSet != null && (curTestSet.LoggingLevel & level) == 0)
    {
        return;
    }

    string stamped = String.Format("[{0} {2}] {1}\r\n", DateTime.Now.ToString(), str, Thread.CurrentThread.ManagedThreadId);
    try
    {
        lock (_instrumentationMessageQueue)
        {
            _instrumentationMessageQueue.Enqueue(stamped);
        }
    }
    catch (IOException)
    {
        // Eat exceptions for IO.
    }
    catch (InvalidOperationException)
    {
        // Eat exceptions if we can't queue.
    }
}
/// <summary> /// Runs the reliability tests. Called from Main with the name of the configuration file we should be using. /// All code in here runs in our starting app domain. /// </summary> /// <param name="testConfig">configuration file to use</param> /// <returns>100 on sucess, another number on failure.</returns> public int RunReliabilityTests(string testConfig, bool doReplay) { _totalSuccess = true; try { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Getting configuration..."); _reliabilityConfig = new ReliabilityConfig(testConfig); } catch (ArgumentException e) { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, String.Format("Error while getting configuration: {0}", e)); return (-1); } catch (FileNotFoundException fe) { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, String.Format("Couldn't find configuration file: {0}", fe)); return (-1); } // save the current directory string curDir = Directory.GetCurrentDirectory(); #if !PROJECTK_BUILD if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("BASE_ROOT"))) { Environment.SetEnvironmentVariable("BASE_ROOT", Environment.CurrentDirectory); } #endif // Enumerator through all the test sets... 
// NOTE(review): each iteration below mutates shared instance state (_curTestSet, _testsRunningCount,
// _testsRanCount, _detourHelpers), so this method is presumably single-entry from Main - confirm it is
// never invoked concurrently. Config parse failures above return -1 without running anything.
foreach (ReliabilityTestSet testSet in _reliabilityConfig) { if (testSet.InstallDetours) { _detourHelpers = new DetourHelpers(); _detourHelpers.Initialize(testSet); } else { if (_detourHelpers != null) { _detourHelpers.Uninitialize(); } _detourHelpers = null; } // restore the current directory incase a test changed it Directory.SetCurrentDirectory(curDir); _logger.WriteToInstrumentationLog(testSet, LoggingLevels.Tests, String.Format("Executing test set: {0}", testSet.FriendlyName)); _testsRunningCount = 0; _testsRanCount = 0; _curTestSet = testSet; _logger.ReportResults = _curTestSet.ReportResults; #if !PROJECTK_BUILD if (curTestSet.AppDomainLoaderMode == AppDomainLoaderMode.RoundRobin) { // full isoloation & normal are handled by the way we setup // tests in ReliabilityConfiguration. Round robin needs extra // logic when we create app domains. _testDomains = new AppDomain[curTestSet.NumAppDomains]; for (int domain = 0; domain < curTestSet.NumAppDomains; domain++) { _testDomains[domain] = AppDomain.CreateDomain("RoundRobinDomain" + domain.ToString()); } } #endif if (_curTestSet.ReportResults) { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.SmartDotNet, "Reporting results..."); try { #if !PROJECTK_BUILD resultReporter = new Result(); resultReporter.Credentials = CredentialCache.DefaultCredentials; resultReporter.Url = curTestSet.ReportResultsTo + "/" + "result.asmx"; WebRequest.DefaultWebProxy = null; Build buildObj = new Build(); buildObj.Url = curTestSet.ReportResultsTo + "/" + "build.asmx"; buildObj.Credentials = CredentialCache.DefaultCredentials; Guid langGuid = Guid.Empty; DataSet ds = buildObj.GetGeneralBuildInfo(); foreach (DataTable myTable in ds.Tables) { if (myTable.TableName == "BuildAttributeValue") { foreach (DataRow myRow in myTable.Rows) { if (myRow["AttributeValueName"].ToString() == "English") { langGuid = new Guid(myRow["AttributeValueGuid"].ToString()); } } } } Guid buildGuid = buildObj.GetBuildGuid( 
Environment.GetEnvironmentVariable("WHIDBEY_TREE"), // build lab Environment.GetEnvironmentVariable("SHORTFLAVOR"), // build flavor, string Environment.GetEnvironmentVariable("VERSION"), // build version, string new Guid[] { langGuid }); // build attributes, Guid[] resultGroupGuid = resultReporter.CreateResultGroupEx( Guid.NewGuid(), String.Format("{0} - {1}", Environment.MachineName, DateTime.Now.ToShortDateString()), curTestSet.BvtCategory, new Guid[] { buildGuid }, new Guid[] { }, // TODO: fill in environmental attributes false, 0, "StressRun"); #endif } catch (Exception e) { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.SmartDotNet, String.Format("Exception while communicating w/ smart.net server: {0}", e)); #if !PROJECTK_BUILD resultReporter = null; #endif AddFailure("Failed to initialize result reporting", null, -1); } } // we don't log while we're replaying a log file. if (!doReplay) { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Logging, "Opening log file..."); if (!_curTestSet.DisableLogging) { _logger.OpenLog(_curTestSet.FriendlyName); } _logger.WriteStartupInfo(s_seed); } if (testSet.Tests == null) { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "No tests to run in test set"); Console.WriteLine("No tests to run, skipping..\r\n"); // no tests in this test set, skip it. continue; } // step 1: preload all the tests, this does NOT start them. _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Preloading tests..."); Console.Write("Loading all tests: "); bool haveAtLeastOneTest = false; for (int i = 0; i < testSet.Tests.Length; i++) { ReliabilityTest test = testSet.Tests[i]; switch (test.TestStartMode) { case TestStartModeEnum.ProcessLoader: Interlocked.Increment(ref LoadingCount); //for the process loader we just need //to fill in some details (such as the full path). 
// Process-loader tests are resolved synchronously here; app-domain-loader tests (non-ProjectK
// builds only) are preloaded in parallel via BeginInvoke and tracked through LoadingCount.
TestPreLoader(test, testSet.DiscoveryPaths); if (test.TestObject == null) { Console.WriteLine("Test does not exist: {0}", test); AddFailure("Test does not exist - disabling.", test, -1); } else { haveAtLeastOneTest = true; } break; case TestStartModeEnum.AppDomainLoader: #if PROJECTK_BUILD Console.WriteLine("Appdomain mode is NOT supported for ProjectK"); #else // for the app domain loader we create the // app domains here. This is kinda slow so we // do it all in parallel. try { if (curTestSet.AppDomainLoaderMode != AppDomainLoaderMode.Lazy) { Interlocked.Increment(ref LoadingCount); test.AppDomainIndex = i % curTestSet.NumAppDomains; // only used for roudn robin scheduling. TestPreLoaderDelegate loadTestDelegate = new TestPreLoaderDelegate(this.TestPreLoader); loadTestDelegate.BeginInvoke(test, testSet.DiscoveryPaths, null, null); } haveAtLeastOneTest = true; } catch { } #endif break; } Console.Write("."); } _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Finished Preloading tests..."); if (!haveAtLeastOneTest) { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "No tests to execute"); AddFailure("No tests exist!", null, -1); Console.WriteLine("I have no tests to run!"); continue; } while (LoadingCount != 0) { int tmp = LoadingCount; Console.Write("{0,4}\b\b\b\b", tmp); Thread.Sleep(1000); } _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "All tests loaded..."); Console.WriteLine(""); // update the startTime _startTime = DateTime.Now; // step 2: start all the tests & run them until we're done. // if we're in replay mode we'll replay the start order from the log. 
// NOTE(review): the percent-pass check below divides by _testsRanCount; if a run completed with
// zero tests ran this would throw DivideByZeroException - confirm _testsRanCount > 0 by this point.
// Also note the "Some tests failed" message prints even when _failCount is 0.
if (doReplay) { Console.WriteLine("Replaying from log file {0}.log", _curTestSet.FriendlyName); ExecuteFromLog("Logs\\" + _curTestSet.FriendlyName + ".log"); } else { _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Beginning test run..."); #if !PROJECTK_BUILD SetupGeneralUnload(); #endif TestStarter(); _logger.CloseLog(); } if ((testSet.PercentPassIsPass != -1 && ((_failCount * 100) / _testsRanCount) < (100 - testSet.PercentPassIsPass))) { Console.WriteLine("Some tests failed, but below the fail percent ({0} ran, {1} failed, perecent={2})", _testsRanCount, _failCount, testSet.PercentPassIsPass); _totalSuccess = true; } } if (_detourHelpers != null) { _detourHelpers.Uninitialize(); } if (_totalSuccess) { Console.WriteLine("All tests passed"); return (100); } return (99); }
/// <summary>
/// Queues a timestamped trace line for the instrumentation log. The instrumentation
/// log is primarily used for a deeper understanding of what happened during the stress run.
/// </summary>
/// <param name="curTestSet">test set whose logging level filters the message; null means always log</param>
/// <param name="level">logging level of this message</param>
/// <param name="str">message text to record</param>
public void WriteToInstrumentationLog(ReliabilityTestSet curTestSet, LoggingLevels level, string str)
{
    // A null test set logs unconditionally; otherwise the level must be enabled.
    bool shouldLog = curTestSet == null || (curTestSet.LoggingLevel & level) != 0;
    if (!shouldLog)
    {
        return;
    }

    string message = $"[{DateTime.Now} {Thread.CurrentThread.ManagedThreadId}] {str}\r\n";
    try
    {
        lock (instrumentationMessageQueue)
        {
            instrumentationMessageQueue.Enqueue(message);
        }
    }
    catch (IOException)
    {
        // Eat exceptions for IO.
    }
    catch (InvalidOperationException)
    {
        // Eat exceptions if we can't queue.
    }
}
/// <summary>
/// Stores the test set and installs the detours for it. The assignment must happen
/// before InstallDetours() - presumably InstallDetours reads _testSet; confirm before reordering.
/// </summary>
/// <param name="testSet">the test set this helper instruments</param>
public void Initialize(ReliabilityTestSet testSet) { _testSet = testSet; InstallDetours(); }
// NOTE(review): this parser maintains three parallel structures: xmlFileStack (nested include
// files being read), testLevelStack (currently-open element context), and foundTests/discoveryPaths
// (accumulated for the current test set, flushed into _curTestSet whenever a new test set element
// begins and once more after the final file is drained).
/// <summary> /// Given a test configfile we find the tests that we actually want to run. /// </summary> private void GetTestsToRun(string testConfig) { int totalDepth = 0; // used for debugging mode so we can keep proper indentation. ArrayList foundTests = new ArrayList(); // the array of tests we've found. ArrayList discoveryPaths = new ArrayList(); // the array of discovery paths we've found. Stack xmlFileStack = new Stack(); // this stack keeps track of our include files. Stack testLevelStack = new Stack(); try { #if PROJECTK_BUILD FileStream fs = new FileStream(testConfig, FileMode.Open, FileAccess.Read, FileShare.Read); xmlFileStack.Push(XmlReader.Create(fs)); #else xmlFileStack.Push(new XmlTextReader(testConfig)); #endif } catch (FileNotFoundException e) { Console.WriteLine("Could not open config file: {0}", testConfig); throw e; } do { #if PROJECTK_BUILD XmlReader currentXML = (XmlReader)xmlFileStack.Pop(); #else XmlTextReader currentXML = (XmlTextReader)xmlFileStack.Pop(); #endif totalDepth -= currentXML.Depth; if (currentXML.Depth != 0) { IndentToDepth(totalDepth + currentXML.Depth - 1); // -1 because we haven't done a .Read on the includes tag yet. XmlDebugOutLine("</" + configInclude + ">"); } while (currentXML.Read()) { switch (currentXML.NodeType) { case XmlNodeType.Element: bool isEmpty = currentXML.IsEmptyElement; IndentToDepth(totalDepth + currentXML.Depth); XmlDebugOut("<" + currentXML.Name); switch (currentXML.Name) { case configInclude: // user included a file in this file. string filename = null; bool skipInclude = false; while (currentXML.MoveToNextAttribute()) { XmlDebugOut(" " + currentXML.Name + "=\"" + currentXML.Value + "\""); switch (currentXML.Name) { case configIncludeFilename: filename = currentXML.Value; break; case debugConfigIncludeInlined: // so we can consume the XML we spit out in debug mode- // we ignore this include tag if it's been inlined. 
// An include marked inlined="true" was already expanded by a previous debug dump, so it is skipped.
if (currentXML.Value.ToLower() == "true" || currentXML.Value == "1") { skipInclude = true; } break; default: throw new Exception("Unknown attribute on include tag!"); } } if (skipInclude) { XmlDebugOutLine(">"); continue; } XmlDebugOut(" " + debugConfigIncludeInlined + "=\"true\">\r\n"); if (filename == null) { throw new ArgumentException("Type or Filename not set on include file! Both attributes must be set to properly include a file."); } xmlFileStack.Push(currentXML); // save our current file. totalDepth += currentXML.Depth; filename = ConvertPotentiallyRelativeFilenameToFullPath(stripFilenameFromPath(currentXML.BaseURI), filename); try { #if PROJECTK_BUILD currentXML = XmlReader.Create(filename); #else currentXML = new XmlTextReader(filename); #endif } catch (FileNotFoundException e) { Console.WriteLine("Could not open included config file: {0}", filename); throw e; } continue; case configIncludes: if (isEmpty) { XmlDebugOut("/>\r\n"); } else { XmlDebugOut(">\r\n"); } continue; // note: we never push or pop includes off of our stack. case configHost: if (testLevelStack.Count == 0) // we'll skip this tag when it shows up in an included file. 
// Only the outermost file's host (reliabilityFramework) tag is processed onto the level stack.
{ testLevelStack.Push(configHost); while (currentXML.MoveToNextAttribute()) { switch (currentXML.Name) { case "xmlns:xsi": case "xmlns:xsd": break; default: throw new Exception("Unknown attribute on reliability tag: " + currentXML.Name); } } } else { if (isEmpty) { XmlDebugOutLine("/>"); } else { XmlDebugOutLine(">"); } continue; } break; case concurrentConfigTest: if (testLevelStack.Count != 0 && (string)testLevelStack.Peek() != configHost) { throw new ArgumentException("The test tag can only appear as a child to the reliabilityFramework tag or a top level tag."); } // save any info we've gathered about tests into the current test set if (_curTestSet != null && foundTests != null && foundTests.Count > 0) { _curTestSet.Tests = (ReliabilityTest[])foundTests.ToArray(typeof(ReliabilityTest)); _curTestSet.DiscoveryPaths = (string[])discoveryPaths.ToArray(typeof(string)); discoveryPaths.Clear(); foundTests.Clear(); } testLevelStack.Push(concurrentConfigTest); _curTestSet = new ReliabilityTestSet(); while (currentXML.MoveToNextAttribute()) { XmlDebugOut(" " + currentXML.Name + "=\"" + currentXML.Value + "\""); switch (currentXML.Name) { case "maximumTestRuns": _curTestSet.MaximumLoops = Convert.ToInt32(currentXML.Value); break; case "maximumExecutionTime": string timeValue = currentXML.Value; _curTestSet.MaximumTime = ConvertTimeValueToTestRunTime(timeValue); break; case "id": _curTestSet.FriendlyName = currentXML.Value; break; case "xmlns:xsi": case "xmlns:xsd": break; case configTestMinimumMem: _curTestSet.MinPercentMem = Convert.ToInt32(currentXML.Value); break; case configLoggingLevel: _curTestSet.LoggingLevel = (LoggingLevels)Convert.ToInt32(currentXML.Value.ToString(), 16); break; case configTestMinimumCPUStaggered: _curTestSet.MinPercentCPUStaggered = currentXML.Value; break; case configTestMinimumCPU: _curTestSet.MinPercentCPU = Convert.ToInt32(currentXML.Value); break; case configInstallDetours: if (currentXML.Value == "true" || currentXML.Value == "1" || 
currentXML.Value == "yes") { _curTestSet.InstallDetours = true; } else if (currentXML.Value == "false" || currentXML.Value == "0" || currentXML.Value == "no") { _curTestSet.InstallDetours = false; } else { throw new Exception("Unknown value for result reporting: " + currentXML.Value); } break; case configTestMinimumTests: _curTestSet.MinTestsRunning = Convert.ToInt32(currentXML.Value); break; case RFConfigOptions.RFConfigOptions_Test_MinMaxTestsUseCPUCount: if (GetTrueFalseOptionValue(currentXML.Value, RFConfigOptions.RFConfigOptions_Test_MinMaxTestsUseCPUCount)) { int CPUCount = Convert.ToInt32(Environment.GetEnvironmentVariable("NUMBER_OF_PROCESSORS")); if (CPUCount <= 0) throw new Exception("Invalid Value when reading NUMBER_OF_PROCESSORS: {0}" + CPUCount); _curTestSet.MinTestsRunning = CPUCount; _curTestSet.MaxTestsRunning = (int)(CPUCount * 1.5); } break; case RFConfigOptions.RFConfigOptions_Test_SuppressConsoleOutputFromTests: _curTestSet.SuppressConsoleOutputFromTests = GetTrueFalseOptionValue(currentXML.Value, RFConfigOptions.RFConfigOptions_Test_SuppressConsoleOutputFromTests); break; case RFConfigOptions.RFConfigOptions_Test_DebugBreakOnHang: _curTestSet.DebugBreakOnTestHang = GetTrueFalseOptionValue(currentXML.Value, RFConfigOptions.RFConfigOptions_Test_DebugBreakOnHang); break; case RFConfigOptions.RFConfigOptions_Test_DebugBreakOnBadTest: _curTestSet.DebugBreakOnBadTest = GetTrueFalseOptionValue(currentXML.Value, RFConfigOptions.RFConfigOptions_Test_DebugBreakOnBadTest); break; case RFConfigOptions.RFConfigOptions_Test_DebugBreakOnOutOfMemory: _curTestSet.DebugBreakOnOutOfMemory = GetTrueFalseOptionValue(currentXML.Value, RFConfigOptions.RFConfigOptions_Test_DebugBreakOnOutOfMemory); break; case RFConfigOptions.RFConfigOptions_Test_DebugBreakOnPathTooLong: _curTestSet.DebugBreakOnPathTooLong = GetTrueFalseOptionValue(currentXML.Value, RFConfigOptions.RFConfigOptions_Test_DebugBreakOnPathTooLong); break; case 
RFConfigOptions.RFConfigOptions_Test_DebugBreakOnMissingTest: _curTestSet.DebugBreakOnMissingTest = GetTrueFalseOptionValue(currentXML.Value, RFConfigOptions.RFConfigOptions_Test_DebugBreakOnMissingTest); break; case configResultReporting: _curTestSet.ReportResults = GetTrueFalseOptionValue(currentXML.Value, configResultReporting); break; case configResultReportingUrl: _curTestSet.ReportResultsTo = currentXML.Value; break; case configResultReportingBvtCategory: try { _curTestSet.BvtCategory = new Guid(currentXML.Value); } catch (FormatException) { throw new Exception(String.Format("BVT Category Guid {0} is not in the correct form", currentXML.Value)); } break; case configTestMaximumTests: _curTestSet.MaxTestsRunning = Convert.ToInt32(currentXML.Value); break; case configTestDisableLogging: _curTestSet.DisableLogging = GetTrueFalseOptionValue(currentXML.Value, configTestDisableLogging); break; case configEnablePerfCounters: _curTestSet.EnablePerfCounters = GetTrueFalseOptionValue(currentXML.Value, configEnablePerfCounters); break; case configDefaultTestStartMode: switch (currentXML.Value) { case configTestStartModeAppDomainLoader: if (null != _curTestSet.DefaultDebugger || null != _curTestSet.DefaultDebuggerOptions) { throw new Exception(String.Format("{0} specified with default debugger or debugger options. 
If you want a debugger per test please use {1}=\"{2}\" ", configTestStartModeAppDomainLoader, configDefaultTestStartMode, configTestStartModeProcessLoader)); } _curTestSet.DefaultTestStartMode = TestStartModeEnum.AppDomainLoader; break; case configTestStartModeProcessLoader: _curTestSet.DefaultTestStartMode = TestStartModeEnum.ProcessLoader; break; default: throw new Exception(String.Format("Unknown test starter {0} specified!", currentXML.Value)); } break; case configRoundRobinAppDomainCount: try { _curTestSet.NumAppDomains = Convert.ToInt32(currentXML.Value); if (_curTestSet.NumAppDomains <= 0) { throw new Exception("Number of app domains must be greater than zero!"); } } catch { throw new Exception(String.Format("The value {0} is not an integer", currentXML.Value)); } break; case configAppDomainLoaderMode: switch (currentXML.Value) { case configAppDomainLoaderModeFullIsolation: _curTestSet.AppDomainLoaderMode = AppDomainLoaderMode.FullIsolation; break; case configAppDomainLoaderModeNormal: _curTestSet.AppDomainLoaderMode = AppDomainLoaderMode.Normal; break; case configAppDomainLoaderModeRoundRobin: _curTestSet.AppDomainLoaderMode = AppDomainLoaderMode.RoundRobin; break; case configAppDomainLoaderModeLazy: _curTestSet.AppDomainLoaderMode = AppDomainLoaderMode.Lazy; break; default: throw new Exception(String.Format("Unknown AD Loader mode {0} specified!", currentXML.Value)); } break; case configPercentPassIsPass: _curTestSet.PercentPassIsPass = Convert.ToInt32(currentXML.Value); break; case configDefaultDebugger: if (currentXML.Value.Length >= 7 && currentXML.Value.Substring(currentXML.Value.Length - 7).ToLower() == "cdb.exe") { _curTestSet.DefaultDebugger = currentXML.Value; } else if (currentXML.Value.Length >= 10 && currentXML.Value.Substring(currentXML.Value.Length - 7).ToLower() == "windbg.exe") { _curTestSet.DefaultDebugger = currentXML.Value; } else if (currentXML.Value.ToLower() == "none") { _curTestSet.DefaultDebugger = String.Empty; } else { throw new 
Exception("Unknown default debugger specified (" + currentXML.Value + ")"); } break; case configDefaultDebuggerOptions: _curTestSet.DefaultDebuggerOptions = Environment.ExpandEnvironmentVariables(currentXML.Value); break; case configULAssemblyLoadPercent: _curTestSet.ULAssemblyLoadPercent = Convert.ToInt32(currentXML.Value); break; case configULAppDomainUnloadPercent: _curTestSet.ULAppDomainUnloadPercent = Convert.ToInt32(currentXML.Value); break; case configULGeneralUnloadPercent: _curTestSet.ULGeneralUnloadPercent = Convert.ToInt32(currentXML.Value); break; case configULWaitTime: _curTestSet.ULWaitTime = Convert.ToInt32(currentXML.Value); break; case configCcFailMail: _curTestSet.CCFailMail = currentXML.Value; break; default: throw new Exception("Unknown attribute (" + currentXML.Name + ") on " + concurrentConfigTest + " tag!"); } } // Check to see if any of the test attribute environment variables are set, // If so, then use the environment variables. if ((Environment.GetEnvironmentVariable("TIMELIMIT") != null) && (Environment.GetEnvironmentVariable("TIMELIMIT") != "")) _curTestSet.MaximumTime = ConvertTimeValueToTestRunTime(Environment.GetEnvironmentVariable("TIMELIMIT")); if ((Environment.GetEnvironmentVariable("MINCPU") != null) && (Environment.GetEnvironmentVariable("MINCPU") != "")) _curTestSet.MinPercentCPU = Convert.ToInt32(Environment.GetEnvironmentVariable("MINCPU")); _testSet.Add(_curTestSet); break; case configDiscovery: if (testLevelStack.Count == 0 || (string)testLevelStack.Peek() != concurrentConfigTest) { throw new ArgumentException("The assembly tag can only appear as a child to the test tag (curent parent tag==" + (string)testLevelStack.Peek() + ")."); } testLevelStack.Push(configDiscovery); string path = null; while (currentXML.MoveToNextAttribute()) { XmlDebugOut(" " + currentXML.Name + "=\"" + currentXML.Value + "\""); switch (currentXML.Name) { case configDiscoveryPath: path = currentXML.Value; break; default: throw new Exception("Unknown 
attribute on include tag (\"" + currentXML.Name + "\")!"); } } discoveryPaths.Add(Environment.ExpandEnvironmentVariables(path)); break; case concurrentConfigAssembly: /*********************************************************************** * Here's where we process an assembly & it's options. * ***********************************************************************/ bool disabled = false; if (testLevelStack.Count == 0 || (string)testLevelStack.Peek() != concurrentConfigTest) { throw new ArgumentException("The assembly tag can only appear as a child to the test tag (curent parent tag==" + (string)testLevelStack.Peek() + ")."); } testLevelStack.Push(concurrentConfigAssembly); ReliabilityTest rt = new ReliabilityTest(_curTestSet.SuppressConsoleOutputFromTests); rt.TestStartMode = _curTestSet.DefaultTestStartMode; // first we need to setup any default options which are set globally on // the test start mode. if (null != _curTestSet.DefaultDebugger) { if (_curTestSet.DefaultTestStartMode != TestStartModeEnum.ProcessLoader) { throw new Exception(String.Format("{0} specified with default debugger or debugger options. If you want a debugger per test please use {1}=\"{2}\" ", configTestStartModeAppDomainLoader, configDefaultTestStartMode, configTestStartModeProcessLoader)); } rt.Debugger = _curTestSet.DefaultDebugger; } if (null != _curTestSet.DefaultDebuggerOptions) { if (_curTestSet.DefaultTestStartMode != TestStartModeEnum.ProcessLoader) { throw new Exception(String.Format("{0} specified with default debugger or debugger options. If you want a debugger per test please use {1}=\"{2}\" ", configTestStartModeAppDomainLoader, configDefaultTestStartMode, configTestStartModeProcessLoader)); } rt.DebuggerOptions = _curTestSet.DefaultDebuggerOptions; } // then we need to process the individual options & overrides. 
// Per-assembly attributes: each case below either fills in a ReliabilityTest field or
// overrides a default inherited from the enclosing test set.
while (currentXML.MoveToNextAttribute()) { XmlDebugOut(" " + currentXML.Name + "=\"" + currentXML.Value + "\""); switch (currentXML.Name) { case configAssemblyName: rt.RefOrID = currentXML.Value; break; case configAssemblyBasePath: rt.BasePath = Environment.ExpandEnvironmentVariables(currentXML.Value); break; case configAssemblyRequiresSDK: if (String.Compare(currentXML.Value, "true", true) == 0 || currentXML.Value == "1" || String.Compare(currentXML.Value, "yes", true) == 0) { rt.RequiresSDK = true; } else if (String.Compare(currentXML.Value, "false", true) == 0 || currentXML.Value == "0" || String.Compare(currentXML.Value, "no", true) == 0) { rt.RequiresSDK = false; } else { throw new Exception("RequiresSDK has illegal value. Must be true, 1, yes, false, 0, or no"); } break; case configAssemblyFilename: rt.Assembly = Environment.ExpandEnvironmentVariables(currentXML.Value); Console.WriteLine("test is " + rt.Assembly); break; case configAssemblySuccessCode: rt.SuccessCode = Convert.ToInt32(currentXML.Value); break; case configAssemblyEntryPoint: rt.Arguments = currentXML.Value; break; case configAssemblyArguments: if (!string.IsNullOrEmpty(currentXML.Value)) rt.Arguments = Environment.ExpandEnvironmentVariables(currentXML.Value); break; case configAssemblyConcurrentCopies: rt.ConcurrentCopies = Convert.ToInt32(currentXML.Value); break; case configAssemblyStatus: if (currentXML.Value == configAssemblyStatusDisabled) { disabled = true; } break; case configAssemblyDebugger: if (TestStartModeEnum.ProcessLoader != _curTestSet.DefaultTestStartMode) { throw new Exception(String.Format("{0} can only be set for test sets with {1}=\"{2}\" set.", configAssemblyDebugger, configDefaultTestStartMode, configTestStartModeProcessLoader)); } if (currentXML.Value.Length >= 7 && currentXML.Value.Substring(currentXML.Value.Length - 7).ToLower() == "cdb.exe") { rt.Debugger = currentXML.Value; } else if (currentXML.Value.Length >= 10 && currentXML.Value.Substring(currentXML.Value.Length 
- 7).ToLower() == "windbg.exe") { rt.Debugger = currentXML.Value; } else if (currentXML.Value.ToLower() == "none") { rt.Debugger = String.Empty; } else { throw new Exception("Unknown debugger specified (" + currentXML.Value + ")"); } break; case configAssemblyDebuggerOptions: if (TestStartModeEnum.ProcessLoader != _curTestSet.DefaultTestStartMode) { throw new Exception(String.Format("{0} can only be set for test sets with {1}=\"{2}\" set.", configAssemblyDebuggerOptions, configDefaultTestStartMode, configTestStartModeProcessLoader)); } rt.DebuggerOptions = Environment.ExpandEnvironmentVariables(currentXML.Value); break; case configAssemblySmartNetGuid: try { rt.Guid = new Guid(currentXML.Value); } catch (FormatException) { throw new Exception(String.Format("The format for guid {0} on test {1} is invalid", currentXML.Value, rt.RefOrID)); } break; case configAssemblyDuration: if (currentXML.Value.IndexOf(":") == -1) { // just a number of minutes rt.ExpectedDuration = Convert.ToInt32(currentXML.Value); } else { // time span try { rt.ExpectedDuration = unchecked((int)(TimeSpan.Parse(currentXML.Value).Ticks / TimeSpan.TicksPerMinute)); } catch { throw new Exception(String.Format("Bad time span {0} for expected duration.", currentXML.Value)); } } break; case configAssemblyTestAttributes: string[] attrs = currentXML.Value.Split(';'); TestAttributes testAttrs = TestAttributes.None; for (int j = 0; j < attrs.Length; j++) { switch (attrs[j].ToLower()) { case "requiressta": testAttrs |= TestAttributes.RequiresSTAThread; break; case "requiresmta": testAttrs |= TestAttributes.RequiresMTAThread; break; default: throw new Exception(String.Format("Unknown test attribute: {0}", attrs[j])); } } rt.TestAttrs = testAttrs; break; case configAssemblyTestLoader: switch (currentXML.Value) { case configTestStartModeAppDomainLoader: if (null != rt.Debugger || null != rt.DebuggerOptions) { throw new Exception(String.Format("{0} specified with debugger or debugger options. 
If you want a debugger per test please use {1}=\"{2}\" ", configTestStartModeAppDomainLoader, configDefaultTestStartMode, configTestStartModeProcessLoader)); } rt.TestStartMode = TestStartModeEnum.AppDomainLoader; break; case configTestStartModeProcessLoader: rt.TestStartMode = TestStartModeEnum.ProcessLoader; break; default: throw new Exception(String.Format("Unknown test starter {0} specified!", currentXML.Value)); } break; case configAssemblyTestOwner: rt.TestOwner = currentXML.Value; break; case configAssemblyTestGroup: string groupName = currentXML.Value; // first, we want to see if another test has this group. We store the group name in // our group List as the 1st entry. If we find a group we set our List // arraylist to that same List (and add ourselves to it). We're then all in // one group, the arraylist. int i = 0; for (i = 0; i < foundTests.Count; i++) { ReliabilityTest test = foundTests[i] as ReliabilityTest; Debug.Assert(test != null, "Non reliability test in foundTests array!"); if (null != test.Group) { string curGroupName = test.Group[0].ToString(); if (String.Compare(curGroupName, groupName, false) == 0) { test.Group.Add(rt); rt.Group = test.Group; break; } } } if (rt.Group == null) { // this is the first test in this group rt.Group = new List<ReliabilityTest>(); rt.Group.Add(rt); } break; case configAssemblyPostCommand: if (rt.PostCommands == null) { // first pre command on this test rt.PostCommands = new List<string>(); } rt.PostCommands.Add(Environment.ExpandEnvironmentVariables(currentXML.Value)); break; case configAssemblyPreCommand: if (rt.PreCommands == null) { // first pre command on this test rt.PreCommands = new List<string>(); } rt.PreCommands.Add(Environment.ExpandEnvironmentVariables(currentXML.Value)); break; case configAssemblyCustomAction: switch (currentXML.Value) { case "LegacySecurityPolicy": rt.CustomAction = CustomActionType.LegacySecurityPolicy; break; default: throw new Exception(String.Format("Unknown custom action: {0}", 
currentXML.Value)); } break; default: throw new Exception("Unknown attribute on assembly tag (" + currentXML.Name + "=" + currentXML.Value + ")"); } } // if the test is disabled or it requires the SDK to be installed & // we don't have the SDK installed then don't add it to our list // of tests to run. if (disabled || (rt.RequiresSDK == true && Environment.GetEnvironmentVariable("INSTALL_SDK") == null)) { break; } int testCopies = 1; if (_curTestSet.AppDomainLoaderMode == AppDomainLoaderMode.FullIsolation) { // in this mode each copy of the test is ran in it's own app domain, // fully isolated from all other copies of the test. If the user // specified a cloning level we need to duplicate the test. testCopies = rt.ConcurrentCopies; rt.ConcurrentCopies = 1; } else if (_curTestSet.AppDomainLoaderMode == AppDomainLoaderMode.RoundRobin) { // In this mode each test is ran in an app domain w/ other tests. testCopies = rt.ConcurrentCopies; rt.ConcurrentCopies = 1; } else { // Normal mode - tests are ran in app domains w/ copies of themselves } string refOrId = rt.RefOrID; if (rt.RefOrID == null || rt.RefOrID == String.Empty) { refOrId = rt.Assembly + rt.Arguments; } for (int j = 0; j < testCopies; j++) { if (testCopies > 1) { rt.RefOrID = String.Format("{0} Copy {1}", refOrId, j); } else { rt.RefOrID = refOrId; } bool fRetry; do { fRetry = false; for (int i = 0; i < foundTests.Count; i++) { if (((ReliabilityTest)foundTests[i]).RefOrID == rt.RefOrID) { rt.RefOrID = rt.RefOrID + "_" + i.ToString(); fRetry = true; break; } } } while (fRetry); ReliabilityTest clone = (ReliabilityTest)rt.Clone(); clone.Index = foundTests.Add(clone); } break; default: throw new ArgumentException("Unknown node (\"" + currentXML.NodeType + "\") named \"" + currentXML.Name + "\"=\"" + currentXML.Value + "\" in config file!"); } // end of switch(currentXML.Name) if (isEmpty) { XmlDebugOut("/>\r\n"); testLevelStack.Pop(); } else { XmlDebugOut(">\r\n"); } break; case XmlNodeType.Text: case 
XmlNodeType.CDATA: case XmlNodeType.ProcessingInstruction: case XmlNodeType.Comment: case XmlNodeType.Document: case XmlNodeType.Whitespace: case XmlNodeType.SignificantWhitespace: break; case XmlNodeType.EndElement: IndentToDepth(totalDepth + currentXML.Depth); XmlDebugOutLine("</" + currentXML.Name + ">"); // note: we never pop or push the includes tag. It's a special 'hidden' tag // we should also never have to pop a configInclude tag, but it might happen if (currentXML.Name != configIncludes && currentXML.Name != configInclude && currentXML.Name != configHost) { testLevelStack.Pop(); } break; } // end of switch(currentXML.NodeType) } // end of while(currentXML.Read()) } while (xmlFileStack.Count > 0); if (_curTestSet != null && foundTests != null && foundTests.Count > 0) { _curTestSet.Tests = (ReliabilityTest[])foundTests.ToArray(typeof(ReliabilityTest)); _curTestSet.DiscoveryPaths = (string[])discoveryPaths.ToArray(typeof(string)); discoveryPaths.Clear(); foundTests.Clear(); } }
/// <summary>
/// Stores the test set and installs the detours for it. The assignment must happen
/// before InstallDetours() - presumably InstallDetours reads _testSet; confirm before reordering.
/// </summary>
/// <param name="testSet">the test set this helper instruments</param>
public void Initialize(ReliabilityTestSet testSet) { _testSet = testSet; InstallDetours(); }