/// <summary>
        /// The main loop of the class. This method will run through the specified number of iterations on all the
        /// specified browsers across all the specified scenarios.
        /// </summary>
        public void Run()
        {
            bool passSucceeded = true;

            LogOsVersion();

            if (_executeWarmupRun)
            {
                Logger.LogWriteLine(" Starting warmup run");
                ScenarioEventSourceProvider.EventLog.WarmupExecutionStart();
                foreach (string browser in _browsers)
                {
                    // Even though we are not collecting any tracing during warmup runs, we need to pass a dummy ElevatorClient to the ExecuteWorkload function.
                    using (var elevatorClient = ElevatorClient.Create(false))
                    {
                        Logger.LogWriteLine(string.Format(" Executing warmup of {0} browser", browser));
                        // use -1 as the iteration value to denote warmup run
                        ExecuteWorkload(-1, browser, "None", "", "", true, false, elevatorClient);
                        Logger.LogWriteLine($" Completed warmup of {browser} browser");
                    }
                }
                ScenarioEventSourceProvider.EventLog.WarmupExecutionStop();
                Logger.LogWriteLine(" Completed warmup run");
            }

            if (_useTimer)
            {
                _timer.Enable();
            }

            if (_usingTraceController)
            {
                Logger.LogWriteLine("Pausing before starting first tracing session to reduce interference.");

                // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                // that the browser energy data reported by E3 going forward is from this test run and not from before running the test pass.
                Thread.Sleep(_e3RefreshDelaySeconds * 1000);
            }

            using (var elevatorClient = ElevatorClient.Create(_usingTraceController))
            {
                elevatorClient.ConnectAsync().Wait();
                elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_PASS} {_etlPath}").Wait();
                Logger.LogWriteLine("Starting Test Pass");

                // Core Execution Loop
                // TODO: Consider breaking up this large loop into smaller methods to ease readability.
                for (int iteration = 0; iteration < _iterations; iteration++)
                {
                    Logger.LogWriteLine($"Iteration: {iteration} ------------------");
                    _timer.SetIteration(iteration);
                    foreach (var currentMeasureSet in _measureSets)
                    {
                        if (_captureBaseline && _usingTraceController)
                        {
                            // capture a baseline of the system for this measureset
                            // A baseline is where we capture measureset data of the system but without running the browser and test pass.
                            // The idea is to get a baseline performance capture of the system without the browser and test pass so it
                            // can be used as a comparison.

                            Logger.LogWriteLine($" Starting capture of system baseline for {_baselineCaptureSeconds} seconds - measureset {currentMeasureSet.Value.Item1}  iteration {iteration}");

                            // Start the trace capture for baseline scenario
                            elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} BASE ITERATION {iteration} SCENARIO_NAME BaseLineCapture WPRPROFILE {currentMeasureSet.Value.Item1} MODE {currentMeasureSet.Value.Item2}").Wait();

                            ScenarioEventSourceProvider.EventLog.MeasurementRegionStart("BaselineCapture");

                            Thread.Sleep(_baselineCaptureSeconds * 1000);

                            ScenarioEventSourceProvider.EventLog.MeasurementRegionStop("BaselineCapture");

                            Logger.LogWriteLine($" Finished capture of system baseline of measureset {currentMeasureSet.Value.Item1}  iteration {iteration}");

                            // End the trace capture for baseline scenario
                            elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} BASE").Wait();

                            // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                            // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
                            Logger.LogWriteLine("  Pausing between tracing sessions to reduce interference.");
                            Thread.Sleep(_e3RefreshDelaySeconds * 1000);
                        }

                        _timer.SetMeasureSet(currentMeasureSet.Key);

                        // Randomize the order the browsers each iteration to reduce systematic bias in the test
                        Random rand = new Random();
                        _browsers = _browsers.OrderBy(a => rand.Next()).ToList <String>();

                        foreach (string browser in _browsers)
                        {
                            _timer.SetBrowser(browser, _extensionsNameAndVersion);

                            passSucceeded = ExecuteWorkload(iteration, browser, currentMeasureSet.Key, currentMeasureSet.Value.Item1, currentMeasureSet.Value.Item2, _overrideTimeout, _usingTraceController, elevatorClient);
                        }
                    }
                }

                CleanupExtensions();
                Logger.LogWriteLine("Completed Test Pass");
                elevatorClient.SendControllerMessageAsync(Elevator.Commands.END_PASS).Wait();
            }
        }
        /// <summary>
        /// The main loop of the class. This method will run through the specified number of iterations on all the
        /// specified browsers across all the specified scenarios.
        /// </summary>
        public void Run()
        {
            LogOsVersion();

            if (_useTimer)
            {
                _timer.Enable();
            }

            if (_usingTraceController)
            {
                Logger.LogWriteLine("Pausing before starting first tracing session to reduce interference.");

                // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                // that the browser energy data reported by E3 going forward is from this test run and not from before running the test pass.
                Thread.Sleep(_e3RefreshDelaySeconds * 1000);
            }

            using (var elevatorClient = ElevatorClient.Create(_usingTraceController))
            {
                elevatorClient.ConnectAsync().Wait();
                elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_PASS} {_etlPath}").Wait();
                Logger.LogWriteLine("Starting Test Pass");

                // Core Execution Loop
                // TODO: Consider breaking up this large loop into smaller methods to ease readability.
                for (int iteration = 0; iteration < _iterations; iteration++)
                {
                    Logger.LogWriteLine(string.Format("Iteration: {0} ------------------", iteration));
                    _timer.SetIteration(iteration);
                    foreach (var currentMeasureSet in _measureSets)
                    {
                        if (_captureBaseline && _usingTraceController)
                        {
                            // capture a baseline of the system for this measureset
                            // A baseline is where we capture measureset data of the system but without running the browser and test pass.
                            // The idea is to get a baseline performance capture of the system without the browser and test pass so it
                            // can be used as a comparison.

                            Logger.LogWriteLine(string.Format(" Starting capture of system baseline for {0} seconds - measureset {1}  iteration {2}", _baselineCaptureSeconds, currentMeasureSet.Value.Item1, iteration));

                            // Start the trace capture
                            elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} BASE ITERATION {iteration} SCENARIO_NAME BaseLineCapture WPRPROFILE {currentMeasureSet.Value.Item1} MODE {currentMeasureSet.Value.Item2}").Wait();

                            Thread.Sleep(_baselineCaptureSeconds * 1000);

                            Logger.LogWriteLine(string.Format(" Finished capture of system baseline of measureset {0}  iteration {1}", currentMeasureSet.Value.Item1, iteration));
                            elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} BASE").Wait();

                            // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                            // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
                            Logger.LogWriteLine("  Pausing between tracing sessions to reduce interference.");
                            Thread.Sleep(_e3RefreshDelaySeconds * 1000);
                        }

                        _timer.SetMeasureSet(currentMeasureSet.Key);

                        // Randomize the order the browsers each iteration to reduce systematic bias in the test
                        Random rand = new Random();
                        _browsers = _browsers.OrderBy(a => rand.Next()).ToList <String>();

                        foreach (string browser in _browsers)
                        {
                            _timer.SetBrowser(browser, _extensionsNameAndVersion);

                            bool passSucceeded = false;
                            for (int attemptNumber = 0; attemptNumber < _maxAttempts && !passSucceeded; attemptNumber++)
                            {
                                if (attemptNumber > 0)
                                {
                                    Logger.LogWriteLine("  Attempting again...");
                                }

                                elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} {browser} ITERATION {iteration} SCENARIO_NAME {_scenarioName} WPRPROFILE {currentMeasureSet.Value.Item1} MODE {currentMeasureSet.Value.Item2}").Wait();

                                Logger.LogWriteLine(string.Format(" Launching Browser Driver: '{0}'", browser));
                                ScenarioEventSourceProvider.EventLog.WorkloadStart(_scenarioName, browser, currentMeasureSet.Value.Item1, iteration, attemptNumber);
                                using (var driver = RemoteWebDriverExtension.CreateDriverAndMaximize(browser, _browserProfilePath, _extensionsPaths))
                                {
                                    string currentScenario = "";
                                    try
                                    {
                                        Stopwatch watch           = Stopwatch.StartNew();
                                        bool      isFirstScenario = true;

                                        _timer.SetDriver(driver);

                                        foreach (var scenario in _scenarios)
                                        {
                                            currentScenario = scenario.ScenarioName;
                                            _timer.SetScenario(scenario.ScenarioName);

                                            // We want every scenario to take the same amount of time total, even if there are changes in
                                            // how long pages take to load. The biggest reason for this is so that you can measure energy
                                            // or power and their ratios will be the same either way.
                                            // So start by getting the current time.
                                            var startTime = watch.Elapsed;

                                            // The first scenario naviagates in the browser's new tab / welcome page.
                                            // After that, scenarios open in their own tabs
                                            if (!isFirstScenario && scenario.Tab == "new")
                                            {
                                                driver.CreateNewTab();
                                            }
                                            else
                                            {
                                                isFirstScenario = false;
                                            }

                                            Logger.LogWriteLine(string.Format("  Executing - Scenario: {0}  Iteration: {1}  Attempt: {2}  Browser: {3}  MeasureSet: {4}", scenario.Scenario.Name, iteration, attemptNumber, browser, currentMeasureSet.Key));
                                            ScenarioEventSourceProvider.EventLog.ScenarioExecutionStart(browser, scenario.Scenario.Name);

                                            // Here, control is handed to the scenario to navigate, and do whatever it wants
                                            scenario.Scenario.Run(driver, browser, _logins, _timer);

                                            ScenarioEventSourceProvider.EventLog.ScenarioExecutionStop(browser, scenario.Scenario.Name);

                                            // When we get control back, we sleep for the remaining time for the scenario. This ensures
                                            // the total time for a scenario is always the same
                                            var runTime  = watch.Elapsed.Subtract(startTime);
                                            var timeLeft = TimeSpan.FromSeconds(scenario.Duration).Subtract(runTime);
                                            if (timeLeft < TimeSpan.FromSeconds(0) && !_overrideTimeout)
                                            {
                                                // Of course it's possible we don't get control back until after we were supposed to
                                                // continue to the next scenario. In that case, invalidate the run by throwing.
                                                Logger.LogWriteLine(string.Format("   !!! Scenario {0} ran longer than expected! The browser ran for {1}s. The timeout for this scenario is {2}s.", scenario.Scenario.Name, runTime.TotalSeconds, scenario.Duration));
                                                throw new Exception(string.Format("Scenario {0} ran longer than expected! The browser ran for {1}s. The timeout for this scenario is {2}s.", scenario.Scenario.Name, runTime.TotalSeconds, scenario.Duration));
                                            }
                                            else if (!_overrideTimeout)
                                            {
                                                Logger.LogWriteLine(string.Format("    Scenario {0} returned in {1} seconds. Sleep for remaining {2} seconds.", scenario.Scenario.Name, runTime.TotalSeconds, timeLeft.TotalSeconds));
                                                driver.Wait(timeLeft.TotalSeconds);
                                            }

                                            Logger.LogWriteLine(string.Format("  Completed - Scenario: {0}  Iteration: {1}  Attempt: {2}  Browser: {3}  MeasureSet: {4}", scenario.Scenario.Name, iteration, attemptNumber, browser, currentMeasureSet.Key, runTime.TotalSeconds));
                                        }

                                        driver.CloseBrowser(browser);
                                        passSucceeded = true;
                                        Logger.LogWriteLine(string.Format(" SUCCESS!  Completed Browser: {0}  Iteration: {1}  Attempt: {2}  MeasureSet: {3}", browser, iteration, attemptNumber, currentMeasureSet.Key));
                                        ScenarioEventSourceProvider.EventLog.WorkloadStop(_scenarioName, browser, currentMeasureSet.Value.Item1, iteration, attemptNumber);
                                    }
                                    catch (Exception ex)
                                    {
                                        // If something goes wrong and we get an exception halfway through the scenario, we clean up
                                        // and put everything back into a state where we can start the next iteration.
                                        elevatorClient.SendControllerMessageAsync(Elevator.Commands.CANCEL_PASS);

                                        try
                                        {
                                            // Attempt to save the page source
                                            string pageSourceFileName = string.Format("pageSource_{0}_{1}_{2}_{3}_{4}.html", browser, currentScenario, iteration, currentMeasureSet.Key, attemptNumber);
                                            pageSourceFileName = Path.Combine(_etlPath, pageSourceFileName);
                                            using (StreamWriter sw = new StreamWriter(pageSourceFileName, false))
                                            {
                                                sw.WriteLine(driver.PageSource);
                                            }

                                            // Attempt to save a screenshot
                                            OpenQA.Selenium.Screenshot screenshot = driver.GetScreenshot();
                                            string imageFileName = string.Format("screenshot_{0}_{1}_{2}_{3}_{4}.png", browser, currentScenario, iteration, currentMeasureSet.Key, attemptNumber);
                                            imageFileName = Path.Combine(_etlPath, imageFileName);
                                            screenshot.SaveAsFile(imageFileName, OpenQA.Selenium.ScreenshotImageFormat.Png);
                                        }
                                        catch (Exception)
                                        {
                                            // ignore this exception as we were just trying to see if we could get a screenshot and pagesource for the original exception.
                                        }

                                        driver.CloseBrowser(browser);
                                        Logger.LogWriteLine("------ EXCEPTION caught while trying to run scenario! ------------------------------------");
                                        Logger.LogWriteLine(string.Format("    Iteration:   {0}", iteration));
                                        Logger.LogWriteLine(string.Format("    Measure Set: {0}", currentMeasureSet.Key));
                                        Logger.LogWriteLine(string.Format("    Browser:     {0}", browser));
                                        Logger.LogWriteLine(string.Format("    Attempt:     {0}", attemptNumber));
                                        Logger.LogWriteLine(string.Format("    Scenario:    {0}", currentScenario));
                                        Logger.LogWriteLine("    Exception:   " + ex.ToString());

                                        if (_usingTraceController)
                                        {
                                            Logger.LogWriteLine("   Trace has been discarded");
                                        }

                                        Logger.LogWriteLine("-------------------------------------------------------");
                                    }
                                    finally
                                    {
                                        if (_usingTraceController)
                                        {
                                            Logger.LogWriteLine("  Pausing between tracing sessions to reduce interference.");

                                            // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                                            // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
                                            Thread.Sleep(_e3RefreshDelaySeconds * 1000);
                                        }
                                    }
                                }
                            }

                            if (passSucceeded)
                            {
                                elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} {browser}").Wait();
                            }
                            else
                            {
                                CleanupExtensions();
                                Logger.LogWriteLine(string.Format("!!! Failed to successfully complete iteration {0} with browser '{1}' after {2} attempts!", iteration, browser, _maxAttempts));
                                throw new Exception(string.Format("!!! Failed to successfully complete iteration {0} with browser '{1}' after {2} attempts!", iteration, browser, _maxAttempts));
                            }
                        }
                    }
                }

                CleanupExtensions();
                Logger.LogWriteLine("Completed Test Pass");
                elevatorClient.SendControllerMessageAsync(Elevator.Commands.END_PASS).Wait();
            }
        }