/// <summary>
    /// Runs the reliability tests.  Called from Main with the name of the configuration file we should be using.
    /// All code in here runs in our starting app domain.  
    /// </summary>
    /// <param name="testConfig">configuration file to use</param>
    /// <returns>100 on success, another number on failure.</returns>
    public int RunReliabilityTests(string testConfig, bool doReplay)
    {
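        // Start optimistic; AddFailure presumably clears this flag when any failure is recorded.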
        _totalSuccess = true;

        try
        {
            _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Getting configuration...");
            _reliabilityConfig = new ReliabilityConfig(testConfig);
        }
        catch (ArgumentException e)
        {
            _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, String.Format("Error while getting configuration: {0}", e));
            return (-1);
        }
        catch (FileNotFoundException fe)
        {
            _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, String.Format("Couldn't find configuration file: {0}", fe));
            return (-1);
        }

        // save the current directory
        string curDir = Directory.GetCurrentDirectory();

#if !PROJECTK_BUILD
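        // Legacy (non-ProjectK) builds rely on BASE_ROOT; default it to the current
        // directory if the environment hasn't already set it.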
        if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("BASE_ROOT")))
        {
            Environment.SetEnvironmentVariable("BASE_ROOT", Environment.CurrentDirectory);
        }
#endif

        // Enumerate through all the test sets...
        foreach (ReliabilityTestSet testSet in _reliabilityConfig)
        {
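            // Install detour hooks if this test set requests them; otherwise tear down
            // any hooks left over from the previous test set.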
            if (testSet.InstallDetours)
            {
                _detourHelpers = new DetourHelpers();
                _detourHelpers.Initialize(testSet);
            }
            else
            {
                if (_detourHelpers != null)
                {
                    _detourHelpers.Uninitialize();
                }
                _detourHelpers = null;
            }

            // restore the current directory in case a test changed it
            Directory.SetCurrentDirectory(curDir);

            _logger.WriteToInstrumentationLog(testSet, LoggingLevels.Tests, String.Format("Executing test set: {0}", testSet.FriendlyName));
            _testsRunningCount = 0;
            _testsRanCount = 0;
            _curTestSet = testSet;
            _logger.ReportResults = _curTestSet.ReportResults;

#if !PROJECTK_BUILD
            if (_curTestSet.AppDomainLoaderMode == AppDomainLoaderMode.RoundRobin)
            {
                // full isolation & normal are handled by the way we set up
                // tests in ReliabilityConfiguration.  Round robin needs extra
                // logic when we create app domains.
                _testDomains = new AppDomain[_curTestSet.NumAppDomains];
                for (int domain = 0; domain < _curTestSet.NumAppDomains; domain++)
                {
                    _testDomains[domain] = AppDomain.CreateDomain("RoundRobinDomain" + domain.ToString());
                }
            }
#endif
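            // Optionally report results to the SmartDotNet server named in the test set configuration.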
            if (_curTestSet.ReportResults)
            {
                _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.SmartDotNet, "Reporting results...");
                try
                {
#if !PROJECTK_BUILD
                    resultReporter = new Result();
                    resultReporter.Credentials = CredentialCache.DefaultCredentials;

                    resultReporter.Url = _curTestSet.ReportResultsTo + "/" + "result.asmx";
                    WebRequest.DefaultWebProxy = null;

                    Build buildObj = new Build();
                    buildObj.Url = _curTestSet.ReportResultsTo + "/" + "build.asmx";
                    buildObj.Credentials = CredentialCache.DefaultCredentials;
                    Guid langGuid = Guid.Empty;

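                    // Scan the build attribute tables for the GUID of the "English" language attribute.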
                    DataSet ds = buildObj.GetGeneralBuildInfo();
                    foreach (DataTable myTable in ds.Tables)
                    {
                        if (myTable.TableName == "BuildAttributeValue")
                        {
                            foreach (DataRow myRow in myTable.Rows)
                            {
                                if (myRow["AttributeValueName"].ToString() == "English")
                                {
                                    langGuid = new Guid(myRow["AttributeValueGuid"].ToString());
                                }
                            }

                        }
                    }

                    Guid buildGuid = buildObj.GetBuildGuid(
                        Environment.GetEnvironmentVariable("WHIDBEY_TREE"), // build lab
                        Environment.GetEnvironmentVariable("SHORTFLAVOR"),  // build flavor, string
                        Environment.GetEnvironmentVariable("VERSION"),      // build version, string
                        new Guid[] { langGuid });                                         // build attributes, Guid[]

                    resultGroupGuid = resultReporter.CreateResultGroupEx(
                        Guid.NewGuid(),
                        String.Format("{0} - {1}", Environment.MachineName, DateTime.Now.ToShortDateString()),
                        _curTestSet.BvtCategory,
                        new Guid[] { buildGuid },
                        new Guid[] { },      // TODO: fill in environmental attributes
                        false,
                        0,
                        "StressRun");
#endif
                }
                catch (Exception e)
                {
                    _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.SmartDotNet, String.Format("Exception while communicating w/ smart.net server: {0}", e));
#if !PROJECTK_BUILD
                    resultReporter = null;
#endif
                    AddFailure("Failed to initialize result reporting", null, -1);
                }
            }

            // we don't log while we're replaying a log file.
            if (!doReplay)
            {
                _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Logging, "Opening log file...");
                if (!_curTestSet.DisableLogging)
                {
                    _logger.OpenLog(_curTestSet.FriendlyName);
                }
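                // Log the random seed; presumably this is what makes a run reproducible when replayed (see doReplay).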
                _logger.WriteStartupInfo(s_seed);
            }

            if (testSet.Tests == null)
            {
                _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "No tests to run in test set");
                Console.WriteLine("No tests to run, skipping..\r\n");
                // no tests in this test set, skip it.
                continue;
            }

            // step 1: preload all the tests, this does NOT start them.
            _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Preloading tests...");
            Console.Write("Loading all tests: ");
            bool haveAtLeastOneTest = false;
            for (int i = 0; i < testSet.Tests.Length; i++)
            {
                ReliabilityTest test = testSet.Tests[i];

                switch (test.TestStartMode)
                {
                    case TestStartModeEnum.ProcessLoader:
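                        // LoadingCount tracks outstanding preloads; TestPreLoader is
                        // expected to decrement it once the test has been resolved.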
                        Interlocked.Increment(ref LoadingCount);

                        // for the process loader we just need to fill in
                        // some details (such as the full path).
                        TestPreLoader(test, testSet.DiscoveryPaths);
                        if (test.TestObject == null)
                        {
                            Console.WriteLine("Test does not exist: {0}", test);
                            AddFailure("Test does not exist - disabling.", test, -1);
                        }
                        else
                        {
                            haveAtLeastOneTest = true;
                        }

                        break;
                    case TestStartModeEnum.AppDomainLoader:
#if PROJECTK_BUILD
                        Console.WriteLine("Appdomain mode is NOT supported for ProjectK");
#else
                        // for the app domain loader we create the
                        // app domains here.  This is kinda slow so we
                        // do it all in parallel.
                        try
                        {
                            if (_curTestSet.AppDomainLoaderMode != AppDomainLoaderMode.Lazy)
                            {
                                Interlocked.Increment(ref LoadingCount);

                                test.AppDomainIndex = i % _curTestSet.NumAppDomains;  // only used for round robin scheduling.
                                TestPreLoaderDelegate loadTestDelegate = new TestPreLoaderDelegate(this.TestPreLoader);
                                loadTestDelegate.BeginInvoke(test, testSet.DiscoveryPaths, null, null);
                            }
                            haveAtLeastOneTest = true;
                        }
                        catch { }  // ignore failures while kicking off the asynchronous preload
#endif
                        break;
                }
                Console.Write(".");
            }

            _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Finished Preloading tests...");
            if (!haveAtLeastOneTest)
            {
                _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "No tests to execute");
                AddFailure("No tests exist!", null, -1);
                Console.WriteLine("I have no tests to run!");
                continue;
            }

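            // Spin until all asynchronous preloads have finished; the backspace characters
            // redraw the remaining count in place on the console.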
            while (LoadingCount != 0)
            {
                int tmp = LoadingCount;
                Console.Write("{0,4}\b\b\b\b", tmp);
                Thread.Sleep(1000);
            }

            _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "All tests loaded...");
            Console.WriteLine("");

            // update the startTime
            _startTime = DateTime.Now;

            // step 2: start all the tests & run them until we're done.
            //          if we're in replay mode we'll replay the start order from the log.

            if (doReplay)
            {
                Console.WriteLine("Replaying from log file {0}.log", _curTestSet.FriendlyName);
                ExecuteFromLog("Logs\\" + _curTestSet.FriendlyName + ".log");
            }
            else
            {
                _logger.WriteToInstrumentationLog(_curTestSet, LoggingLevels.Tests, "Beginning test run...");
#if !PROJECTK_BUILD
                SetupGeneralUnload();
#endif 
                TestStarter();
                _logger.CloseLog();
            }

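            // A test set may tolerate some failures: if the observed failure rate stays
            // below (100 - PercentPassIsPass)%, the whole set still counts as a pass.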
            if (testSet.PercentPassIsPass != -1 && _testsRanCount > 0 && ((_failCount * 100) / _testsRanCount) < (100 - testSet.PercentPassIsPass))  // _testsRanCount > 0 avoids a divide-by-zero if no tests ran
            {
                Console.WriteLine("Some tests failed, but below the fail percent ({0} ran, {1} failed, percent={2})", _testsRanCount, _failCount, testSet.PercentPassIsPass);
                _totalSuccess = true;
            }
        }

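        // Tear down any detour hooks that are still installed before reporting the final result.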
        if (_detourHelpers != null)
        {
            _detourHelpers.Uninitialize();
        }

        if (_totalSuccess)
        {
            Console.WriteLine("All tests passed");
            return (100);
        }
        return (99);
    }