/// <summary>
/// Persists a Selenium screenshot to disk under an automatically generated
/// file name derived from the given bot data.
/// </summary>
/// <param name="screenshot">The Selenium screenshot to persist.</param>
/// <param name="data">The BotData used for path creation; its Screenshots list is updated with the new path.</param>
public static void SaveScreenshot(OpenQA.Selenium.Screenshot screenshot, BotData data)
{
    // Generate the destination path, record it on the bot data first
    // (so the path is tracked even if the disk write later fails),
    // then write the capture to that file.
    var destination = MakeScreenshotPath(data);
    data.Screenshots.Add(destination);
    screenshot.SaveAsFile(destination);
}
/// <summary>
/// Captures a screenshot of the current browser window and saves it as a JPEG
/// under the reports share, named with a UTC timestamp.
/// </summary>
public void takeScreenShot()
{
    OpenQA.Selenium.Screenshot screen = driver.TakeScreenshot();

    // BUG FIX: the previous pattern "yyyy-MM-dd-mm-ss" had no hour component and
    // used 'mm' (minutes) in its place, so screenshots taken at the same
    // minute/second of different hours overwrote each other. "HH-mm-ss" makes
    // the name unique down to the second.
    string fileName = DateTime.UtcNow.ToString("yyyy-MM-dd-HH-mm-ss") + ".jpeg";

    // Path.Combine instead of raw string concatenation (also fixes the
    // 'filpath' typo in the local name).
    string filePath = System.IO.Path.Combine(@"N:\Reports", fileName);
    screen.SaveAsFile(filePath, OpenQA.Selenium.ScreenshotImageFormat.Jpeg);
}
/// <summary>
/// Verifies element-level screenshot capture: screenshots the "AlarmPivotItem"
/// element through both the app session and a separate desktop session, plus the
/// app's top-level window, then asserts the captures' pixel dimensions match the
/// elements' reported sizes and that the element capture is smaller than the
/// window capture.
/// </summary>
public void GetElementScreenshot()
{
    WindowsDriver <WindowsElement> desktopSession = null;
    try
    {
        // Locate the AlarmPivotItem element in Alarms & Clock app to be captured
        WindowsElement alarmPivotItem1 = session.FindElementByAccessibilityId("AlarmPivotItem");
        OpenQA.Selenium.Screenshot alarmPivotItemScreenshot1 = alarmPivotItem1.GetScreenshot();

        // Save the AlarmPivotItem screenshot capture locally on the machine running the test
        alarmPivotItemScreenshot1.SaveAsFile(@"ScreenshotAlarmPivotItem.png", ImageFormat.Png);

        // Using the Desktop session, locate the same AlarmPivotItem element in Alarms & Clock app to be captured
        desktopSession = Utility.CreateNewSession(CommonTestSettings.DesktopAppId);
        WindowsElement alarmPivotItem2 = desktopSession.FindElementByAccessibilityId("AlarmPivotItem");
        OpenQA.Selenium.Screenshot alarmPivotItemScreenshot2 = alarmPivotItem2.GetScreenshot();

        // Using the Desktop session, locate the Alarms & Clock app top level window to be captured
        WindowsElement alarmsClockWindowTopWindow = desktopSession.FindElementByName("Alarms & Clock");
        OpenQA.Selenium.Screenshot alarmsClockWindowTopWindowScreenshot = alarmsClockWindowTopWindow.GetScreenshot();

        // Wrap the raw capture bytes in memory streams so they can be decoded
        // as images for the size assertions below.
        using (MemoryStream msScreenshot1 = new MemoryStream(alarmPivotItemScreenshot1.AsByteArray))
        using (MemoryStream msScreenshot2 = new MemoryStream(alarmPivotItemScreenshot2.AsByteArray))
        using (MemoryStream msScreenshot3 = new MemoryStream(alarmsClockWindowTopWindowScreenshot.AsByteArray))
        {
            // Verify that the element screenshot has a valid size
            Image screenshotImage1 = Image.FromStream(msScreenshot1);
            Assert.AreEqual(alarmPivotItem1.Size.Height, screenshotImage1.Height);
            Assert.AreEqual(alarmPivotItem1.Size.Width, screenshotImage1.Width);

            // Verify that the element screenshot captured using the Alarms & Clock session
            // is identical in size with the one taken using the desktop session
            Image screenshotImage2 = Image.FromStream(msScreenshot2);
            Assert.AreEqual(screenshotImage1.Height, screenshotImage2.Height);
            Assert.AreEqual(screenshotImage1.Width, screenshotImage2.Width);

            // Verify that the element screenshot is smaller in size compared to the application top level window
            Image screenshotImage3 = Image.FromStream(msScreenshot3);
            Assert.AreEqual(alarmsClockWindowTopWindow.Size.Height, screenshotImage3.Height);
            Assert.AreEqual(alarmsClockWindowTopWindow.Size.Width, screenshotImage3.Width);
            Assert.IsTrue(screenshotImage3.Height > screenshotImage1.Height);
            Assert.IsTrue(screenshotImage3.Width > screenshotImage1.Width);
        }
    }
    finally
    {
        // Always tear down the extra desktop session created by this test;
        // the primary 'session' is presumably owned by the fixture — TODO confirm.
        if (desktopSession != null)
        {
            desktopSession.Quit();
        }
    }
}
//
// TakeScreenshot
///////////////////////////////////////////////////////////////////////////////////
/// <summary>
/// Captures a screenshot from the shared WebDriver and saves it as a PNG under
/// the project's Logs folder.
/// </summary>
/// <param name="imageName">File name (including extension) for the saved image.</param>
/// <returns>The full path of the saved screenshot file.</returns>
protected static String TakeScreenshot(String imageName)
{
    // Build the destination path under <project>\Logs (Path.Combine instead of
    // manual "\\" concatenation).
    String path = System.IO.Path.Combine(TestBase.ProjectPath, "Logs", imageName);
    OpenQA.Selenium.Screenshot ss = ((OpenQA.Selenium.ITakesScreenshot)TestBase.WebDriver).GetScreenshot();
    // Removed two unused locals: the Base64 string and byte[] copies were
    // computed on every call but never read, re-encoding/copying the whole
    // image for nothing.
    ss.SaveAsFile(path, OpenQA.Selenium.ScreenshotImageFormat.Png);
    return(path);
}
// This method is the one actually responsible for executing each scenario in the workload
// It includes error checking and retry attempts as well as controls for starting and stopping ETL tracing via ElevatorServer.exe
//
// Parameters:
//   iteration            - index of the current test-pass iteration (used in trace names and logging)
//   browser              - browser under test
//   measureSetName       - measure-set name (logging only)
//   wprProfileName       - WPR profile forwarded to the tracing controller
//   tracingMode          - tracing mode forwarded to the tracing controller
//   overrideTimeout      - when true, scenario overruns neither throw nor sleep out the remainder
//   usingTraceController - when true, pauses around trace start/stop to reduce E3 interference
//   elevatorClient       - channel to ElevatorServer.exe for trace session control
// Returns true when an attempt succeeded; throws after _maxAttempts failed attempts.
private bool ExecuteWorkload(int iteration, string browser, string measureSetName, string wprProfileName, string tracingMode, bool overrideTimeout, bool usingTraceController, IElevatorClient elevatorClient)
{
    bool passSucceeded = false;
    // Retry loop: each attempt restarts tracing and the browser from scratch.
    for (int attemptNumber = 0; attemptNumber < _maxAttempts && !passSucceeded; attemptNumber++)
    {
        int scenarioIndex = 0;
        if (attemptNumber > 0)
        {
            Logger.LogWriteLine(" Attempting again...");
        }
        string workloadName = _workloadName;
        if (_enableScenarioTracing)
        {
            // Capturing a trace per each scenario of the workload. Append the scenario name and index to the workloadname for documentation purposes.
            workloadName = _workloadName + "-0-" + _scenarios[0].ScenarioName;
        }
        // Start tracing
        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} {browser} ITERATION {iteration} SCENARIO_NAME {workloadName} WPRPROFILE {wprProfileName} MODE {tracingMode}").Wait();
        if (!string.IsNullOrEmpty(_executeScriptFileName))
        {
            // execute the selected script and pass "STARTSCENARIO"+<scenario>+<iteration>
            ExecuteScript(_executeScriptFileName, $"STARTSCENARIO {_scenarios[0].ScenarioName} {iteration.ToString()}");
        }
        if (usingTraceController)
        {
            Logger.LogWriteLine($" Pausing {_e3RefreshDelaySeconds} seconds after starting the trace session to reduce interference.");
            // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
            // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
            Thread.Sleep(_e3RefreshDelaySeconds * 1000);
        }
        Logger.LogWriteLine($" Launching Browser Driver: '{browser}'");
        ScenarioEventSourceProvider.EventLog.WorkloadStart(_workloadName, browser, wprProfileName, iteration, attemptNumber);
        using (var driver = RemoteWebDriverExtension.CreateDriverAndMaximize(browser, _clearBrowserCache, _enableVerboseLogging, _browserProfilePath, _extensionsPaths, _port, _hostName))
        {
            string currentScenarioName = "";
            try
            {
                Stopwatch watch = Stopwatch.StartNew();
                bool isFirstScenario = true;
                _timer.SetDriver(driver);
                foreach (var currentScenario in _scenarios)
                {
                    if (_enableScenarioTracing && scenarioIndex > 0)
                    {
                        // Capturing a trace per each scenario of the workload.
                        // Stop and save the current trace session which is for the last scenario.
                        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} {browser}").Wait();
                        // let's just wait a few seconds before starting the next trace
                        Thread.Sleep(3000);
                        // Append the scenario name and index to the workloadname for documentation purposes.
                        workloadName = _workloadName + "-" + scenarioIndex + "-" + currentScenario.ScenarioName;
                        // Start tracing for the current scenario
                        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} {browser} ITERATION {iteration} SCENARIO_NAME {workloadName} WPRPROFILE {wprProfileName} MODE {tracingMode}").Wait();
                    }
                    if (!string.IsNullOrEmpty(_executeScriptFileName) && scenarioIndex > 0)
                    {
                        // execute the selected script and pass "ENDSCENARIO"
                        ExecuteScript(_executeScriptFileName, "ENDSCENARIO");
                        // execute the selected script and pass "STARTSCENARIO"+<scenario>+<iteration>
                        ExecuteScript(_executeScriptFileName, $"STARTSCENARIO {currentScenario.ScenarioName} {iteration.ToString()}");
                    }
                    // Save the name of the current scenarion in case an exception is thrown in which case the local variable 'currentScenario' will be lost
                    currentScenarioName = currentScenario.ScenarioName;
                    _timer.SetScenario(currentScenario.ScenarioName);
                    // We want every scenario to take the same amount of time total, even if there are changes in
                    // how long pages take to load. The biggest reason for this is so that you can measure energy
                    // or power and their ratios will be the same either way.
                    // So start by getting the current time.
                    var startTime = watch.Elapsed;
                    // The first scenario naviagates in the browser's new tab / welcome page.
                    // After that, scenarios open in their own tabs
                    if (!isFirstScenario && currentScenario.Tab == "new")
                    {
                        driver.CreateNewTab();
                    }
                    isFirstScenario = false;
                    Logger.LogWriteLine($" Executing - Scenario: {currentScenario.ScenarioName} Iteration: {iteration} Attempt: {attemptNumber} Browser: {browser} MeasureSet: {measureSetName}");
                    ScenarioEventSourceProvider.EventLog.ScenarioExecutionStart(browser, currentScenario.ScenarioName);
                    // Here, control is handed to the scenario to navigate, and do whatever it wants
                    currentScenario.Scenario.Run(driver, browser, _logins, _timer);
                    // When we get control back, we sleep for the remaining time for the scenario. This ensures
                    // the total time for a scenario is always the same
                    var runTime = watch.Elapsed.Subtract(startTime);
                    var timeLeft = TimeSpan.FromSeconds(currentScenario.Duration).Subtract(runTime);
                    if (timeLeft < TimeSpan.FromSeconds(0) && !overrideTimeout)
                    {
                        // Of course it's possible we don't get control back until after we were supposed to
                        // continue to the next scenario. In that case, invalidate the run by throwing.
                        Logger.LogWriteLine($" !!! Scenario {currentScenario.ScenarioName} ran longer than expected! The browser ran for {runTime.TotalSeconds}s. The timeout for this scenario is {currentScenario.Duration}s.");
                        throw new Exception($"Scenario {currentScenario.ScenarioName} ran longer than expected! The browser ran for {runTime.TotalSeconds}s. The timeout for this scenario is {currentScenario.Duration}s.");
                    }
                    else if (!overrideTimeout)
                    {
                        ScenarioEventSourceProvider.EventLog.ScenarioIdleStart(currentScenario.ScenarioName, timeLeft.TotalSeconds);
                        Logger.LogWriteLine($" Scenario {currentScenario.ScenarioName} returned in {runTime.TotalSeconds} seconds. Sleep for remaining {timeLeft.TotalSeconds} seconds.");
                        Thread.Sleep((int)timeLeft.TotalMilliseconds);
                        ScenarioEventSourceProvider.EventLog.ScenarioIdleStop(currentScenario.ScenarioName, timeLeft.TotalSeconds);
                    }
                    ScenarioEventSourceProvider.EventLog.ScenarioExecutionStop(browser, currentScenario.ScenarioName);
                    Logger.LogWriteLine($" Completed - Scenario: {currentScenario.ScenarioName} Iteration: {iteration} Attempt: {attemptNumber} Browser: {browser} MeasureSet: {measureSetName}");
                    scenarioIndex++;
                }
                driver.CloseBrowser(browser);
                passSucceeded = true;
                Logger.LogWriteLine($" SUCCESS! Completed Browser: {browser} Iteration: {iteration} Attempt: {attemptNumber} MeasureSet: {measureSetName}");
                ScenarioEventSourceProvider.EventLog.WorkloadStop(_workloadName, browser, wprProfileName, iteration, attemptNumber);
            }
            catch (Exception ex)
            {
                // If something goes wrong and we get an exception halfway through the scenario, we clean up
                // and put everything back into a state where we can start the next iteration.
                // NOTE(review): CANCEL_PASS is deliberately not awaited (no .Wait()) unlike the
                // other controller messages — confirm this fire-and-forget is intended.
                elevatorClient.SendControllerMessageAsync(Elevator.Commands.CANCEL_PASS);
                if (!string.IsNullOrEmpty(_executeScriptFileName))
                {
                    // execute the selected script and pass "FAIL"
                    ExecuteScript(_executeScriptFileName, "FAIL");
                }
                try
                {
                    // Attempt to save the page source
                    string pageSourceFileName = string.Format("pageSource_{0}_{1}_{2}_{3}_{4}.html", browser, currentScenarioName, iteration, measureSetName, attemptNumber);
                    pageSourceFileName = Path.Combine(_etlPath, pageSourceFileName);
                    using (StreamWriter sw = new StreamWriter(pageSourceFileName, false))
                    {
                        sw.WriteLine(driver.PageSource);
                    }
                    // Attempt to save a screenshot
                    OpenQA.Selenium.Screenshot screenshot = driver.GetScreenshot();
                    string imageFileName = string.Format("screenshot_{0}_{1}_{2}_{3}_{4}.png", browser, currentScenarioName, iteration, measureSetName, attemptNumber);
                    imageFileName = Path.Combine(_etlPath, imageFileName);
                    screenshot.SaveAsFile(imageFileName, OpenQA.Selenium.ScreenshotImageFormat.Png);
                }
                catch (Exception)
                {
                    // ignore this exception as we were just trying to see if we could get a screenshot and pagesource for the original exception.
                }
                driver.CloseBrowser(browser);
                Logger.LogWriteLine("------ EXCEPTION caught while trying to run scenario! ------------------------------------");
                Logger.LogWriteLine($" Iteration: {iteration}");
                Logger.LogWriteLine($" Measure Set: {measureSetName}");
                Logger.LogWriteLine($" Browser: {browser}");
                Logger.LogWriteLine($" Attempt: {attemptNumber}");
                Logger.LogWriteLine($" Scenario: {currentScenarioName}");
                Logger.LogWriteLine($" Exception: {ex.ToString()}");
                if (usingTraceController)
                {
                    Logger.LogWriteLine(" Trace has been discarded");
                }
                Logger.LogWriteLine("-------------------------------------------------------");
            }
            finally
            {
                if (usingTraceController)
                {
                    Logger.LogWriteLine($" Pausing {_e3RefreshDelaySeconds} seconds before stopping the trace session to reduce interference.");
                    // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                    // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
                    Thread.Sleep(_e3RefreshDelaySeconds * 1000);
                }
            }
        }
    }
    if (passSucceeded)
    {
        // Stop tracing
        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} {browser}").Wait();
        if (!string.IsNullOrEmpty(_executeScriptFileName))
        {
            // execute the selected script and pass "ENDSCENARIO"
            ExecuteScript(_executeScriptFileName, "ENDSCENARIO");
        }
    }
    else
    {
        CleanupExtensions();
        Logger.LogWriteLine($"!!! Failed to successfully complete iteration {iteration} with browser '{browser}' after {_maxAttempts} attempts!");
        throw new Exception($"!!! Failed to successfully complete iteration {iteration} with browser '{browser}' after {_maxAttempts} attempts!");
    }
    return(passSucceeded);
}
/// <summary>
/// The main loop of the class. This method will run through the specified number of iterations on all the
/// specified browsers across all the specified scenarios.
/// </summary>
public void Run()
{
    LogOsVersion();
    if (_useTimer)
    {
        _timer.Enable();
    }
    if (_usingTraceController)
    {
        Logger.LogWriteLine("Pausing before starting first tracing session to reduce interference.");
        // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
        // that the browser energy data reported by E3 going forward is from this test run and not from before running the test pass.
        Thread.Sleep(_e3RefreshDelaySeconds * 1000);
    }
    using (var elevatorClient = ElevatorClient.Create(_usingTraceController))
    {
        elevatorClient.ConnectAsync().Wait();
        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_PASS} {_etlPath}").Wait();
        Logger.LogWriteLine("Starting Test Pass");
        // Core Execution Loop
        // TODO: Consider breaking up this large loop into smaller methods to ease readability.
        for (int iteration = 0; iteration < _iterations; iteration++)
        {
            Logger.LogWriteLine(string.Format("Iteration: {0} ------------------", iteration));
            _timer.SetIteration(iteration);
            foreach (var currentMeasureSet in _measureSets)
            {
                if (_captureBaseline && _usingTraceController)
                {
                    // capture a baseline of the system for this measureset
                    // A baseline is where we capture measureset data of the system but without running the browser and test pass.
                    // The idea is to get a baseline performance capture of the system without the browser and test pass so it
                    // can be used as a comparison.
                    Logger.LogWriteLine(string.Format(" Starting capture of system baseline for {0} seconds - measureset {1} iteration {2}", _baselineCaptureSeconds, currentMeasureSet.Value.Item1, iteration));
                    // Start the trace capture
                    elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} BASE ITERATION {iteration} SCENARIO_NAME BaseLineCapture WPRPROFILE {currentMeasureSet.Value.Item1} MODE {currentMeasureSet.Value.Item2}").Wait();
                    Thread.Sleep(_baselineCaptureSeconds * 1000);
                    Logger.LogWriteLine(string.Format(" Finished capture of system baseline of measureset {0} iteration {1}", currentMeasureSet.Value.Item1, iteration));
                    elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} BASE").Wait();
                    // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                    // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
                    Logger.LogWriteLine(" Pausing between tracing sessions to reduce interference.");
                    Thread.Sleep(_e3RefreshDelaySeconds * 1000);
                }
                _timer.SetMeasureSet(currentMeasureSet.Key);
                // Randomize the order the browsers each iteration to reduce systematic bias in the test
                // NOTE(review): a fresh 'new Random()' every measure set is time-seeded; consider a
                // single shared instance if the pass iterates faster than the seed granularity.
                Random rand = new Random();
                _browsers = _browsers.OrderBy(a => rand.Next()).ToList <String>();
                foreach (string browser in _browsers)
                {
                    _timer.SetBrowser(browser, _extensionsNameAndVersion);
                    bool passSucceeded = false;
                    // Retry loop: each attempt restarts tracing and the browser from scratch.
                    for (int attemptNumber = 0; attemptNumber < _maxAttempts && !passSucceeded; attemptNumber++)
                    {
                        if (attemptNumber > 0)
                        {
                            Logger.LogWriteLine(" Attempting again...");
                        }
                        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} {browser} ITERATION {iteration} SCENARIO_NAME {_scenarioName} WPRPROFILE {currentMeasureSet.Value.Item1} MODE {currentMeasureSet.Value.Item2}").Wait();
                        Logger.LogWriteLine(string.Format(" Launching Browser Driver: '{0}'", browser));
                        ScenarioEventSourceProvider.EventLog.WorkloadStart(_scenarioName, browser, currentMeasureSet.Value.Item1, iteration, attemptNumber);
                        using (var driver = RemoteWebDriverExtension.CreateDriverAndMaximize(browser, _browserProfilePath, _extensionsPaths))
                        {
                            string currentScenario = "";
                            try
                            {
                                Stopwatch watch = Stopwatch.StartNew();
                                bool isFirstScenario = true;
                                _timer.SetDriver(driver);
                                foreach (var scenario in _scenarios)
                                {
                                    currentScenario = scenario.ScenarioName;
                                    _timer.SetScenario(scenario.ScenarioName);
                                    // We want every scenario to take the same amount of time total, even if there are changes in
                                    // how long pages take to load. The biggest reason for this is so that you can measure energy
                                    // or power and their ratios will be the same either way.
                                    // So start by getting the current time.
                                    var startTime = watch.Elapsed;
                                    // The first scenario naviagates in the browser's new tab / welcome page.
                                    // After that, scenarios open in their own tabs
                                    if (!isFirstScenario && scenario.Tab == "new")
                                    {
                                        driver.CreateNewTab();
                                    }
                                    else
                                    {
                                        isFirstScenario = false;
                                    }
                                    Logger.LogWriteLine(string.Format(" Executing - Scenario: {0} Iteration: {1} Attempt: {2} Browser: {3} MeasureSet: {4}", scenario.Scenario.Name, iteration, attemptNumber, browser, currentMeasureSet.Key));
                                    ScenarioEventSourceProvider.EventLog.ScenarioExecutionStart(browser, scenario.Scenario.Name);
                                    // Here, control is handed to the scenario to navigate, and do whatever it wants
                                    scenario.Scenario.Run(driver, browser, _logins, _timer);
                                    ScenarioEventSourceProvider.EventLog.ScenarioExecutionStop(browser, scenario.Scenario.Name);
                                    // When we get control back, we sleep for the remaining time for the scenario. This ensures
                                    // the total time for a scenario is always the same
                                    var runTime = watch.Elapsed.Subtract(startTime);
                                    var timeLeft = TimeSpan.FromSeconds(scenario.Duration).Subtract(runTime);
                                    if (timeLeft < TimeSpan.FromSeconds(0) && !_overrideTimeout)
                                    {
                                        // Of course it's possible we don't get control back until after we were supposed to
                                        // continue to the next scenario. In that case, invalidate the run by throwing.
                                        Logger.LogWriteLine(string.Format(" !!! Scenario {0} ran longer than expected! The browser ran for {1}s. The timeout for this scenario is {2}s.", scenario.Scenario.Name, runTime.TotalSeconds, scenario.Duration));
                                        throw new Exception(string.Format("Scenario {0} ran longer than expected! The browser ran for {1}s. The timeout for this scenario is {2}s.", scenario.Scenario.Name, runTime.TotalSeconds, scenario.Duration));
                                    }
                                    else if (!_overrideTimeout)
                                    {
                                        Logger.LogWriteLine(string.Format(" Scenario {0} returned in {1} seconds. Sleep for remaining {2} seconds.", scenario.Scenario.Name, runTime.TotalSeconds, timeLeft.TotalSeconds));
                                        driver.Wait(timeLeft.TotalSeconds);
                                    }
                                    Logger.LogWriteLine(string.Format(" Completed - Scenario: {0} Iteration: {1} Attempt: {2} Browser: {3} MeasureSet: {4}", scenario.Scenario.Name, iteration, attemptNumber, browser, currentMeasureSet.Key, runTime.TotalSeconds));
                                }
                                driver.CloseBrowser(browser);
                                passSucceeded = true;
                                Logger.LogWriteLine(string.Format(" SUCCESS! Completed Browser: {0} Iteration: {1} Attempt: {2} MeasureSet: {3}", browser, iteration, attemptNumber, currentMeasureSet.Key));
                                ScenarioEventSourceProvider.EventLog.WorkloadStop(_scenarioName, browser, currentMeasureSet.Value.Item1, iteration, attemptNumber);
                            }
                            catch (Exception ex)
                            {
                                // If something goes wrong and we get an exception halfway through the scenario, we clean up
                                // and put everything back into a state where we can start the next iteration.
                                // NOTE(review): CANCEL_PASS is not awaited, unlike the other controller
                                // messages — confirm this fire-and-forget is intended.
                                elevatorClient.SendControllerMessageAsync(Elevator.Commands.CANCEL_PASS);
                                try
                                {
                                    // Attempt to save the page source
                                    string pageSourceFileName = string.Format("pageSource_{0}_{1}_{2}_{3}_{4}.html", browser, currentScenario, iteration, currentMeasureSet.Key, attemptNumber);
                                    pageSourceFileName = Path.Combine(_etlPath, pageSourceFileName);
                                    using (StreamWriter sw = new StreamWriter(pageSourceFileName, false))
                                    {
                                        sw.WriteLine(driver.PageSource);
                                    }
                                    // Attempt to save a screenshot
                                    OpenQA.Selenium.Screenshot screenshot = driver.GetScreenshot();
                                    string imageFileName = string.Format("screenshot_{0}_{1}_{2}_{3}_{4}.png", browser, currentScenario, iteration, currentMeasureSet.Key, attemptNumber);
                                    imageFileName = Path.Combine(_etlPath, imageFileName);
                                    screenshot.SaveAsFile(imageFileName, OpenQA.Selenium.ScreenshotImageFormat.Png);
                                }
                                catch (Exception)
                                {
                                    // ignore this exception as we were just trying to see if we could get a screenshot and pagesource for the original exception.
                                }
                                driver.CloseBrowser(browser);
                                Logger.LogWriteLine("------ EXCEPTION caught while trying to run scenario! ------------------------------------");
                                Logger.LogWriteLine(string.Format(" Iteration: {0}", iteration));
                                Logger.LogWriteLine(string.Format(" Measure Set: {0}", currentMeasureSet.Key));
                                Logger.LogWriteLine(string.Format(" Browser: {0}", browser));
                                Logger.LogWriteLine(string.Format(" Attempt: {0}", attemptNumber));
                                Logger.LogWriteLine(string.Format(" Scenario: {0}", currentScenario));
                                Logger.LogWriteLine(" Exception: " + ex.ToString());
                                if (_usingTraceController)
                                {
                                    Logger.LogWriteLine(" Trace has been discarded");
                                }
                                Logger.LogWriteLine("-------------------------------------------------------");
                            }
                            finally
                            {
                                if (_usingTraceController)
                                {
                                    Logger.LogWriteLine(" Pausing between tracing sessions to reduce interference.");
                                    // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                                    // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
                                    Thread.Sleep(_e3RefreshDelaySeconds * 1000);
                                }
                            }
                        }
                    }
                    if (passSucceeded)
                    {
                        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} {browser}").Wait();
                    }
                    else
                    {
                        CleanupExtensions();
                        Logger.LogWriteLine(string.Format("!!! Failed to successfully complete iteration {0} with browser '{1}' after {2} attempts!", iteration, browser, _maxAttempts));
                        throw new Exception(string.Format("!!! Failed to successfully complete iteration {0} with browser '{1}' after {2} attempts!", iteration, browser, _maxAttempts));
                    }
                }
            }
        }
        CleanupExtensions();
        Logger.LogWriteLine("Completed Test Pass");
        elevatorClient.SendControllerMessageAsync(Elevator.Commands.END_PASS).Wait();
    }
}
// This method is the one actually responsible for executing each scenario in the workload
// It includes error checking and retry attempts as well as controls for starting and stopping ETL tracing via ElevatorServer.exe
//
// Parameters mirror the other ExecuteWorkload overload; this variant additionally
// supports per-scenario tracing with computed trace DURATIONs, optional WPR
// disabling (_disableWpr), chromedriver log polling, scenario SetUp/TearDown
// hooks, benchmark-result saving, and an infinite-loop mode (_infiniteLoop).
// Returns true when an attempt succeeded; throws after _maxAttempts failed attempts.
private bool ExecuteWorkload(int iteration, string browser, string measureSetName, string wprProfileName, string tracingMode, bool overrideTimeout, bool usingTraceController, IElevatorClient elevatorClient)
{
    Logger.LogWriteLine(string.Format(" ExecuteWorkload for {0} seconds - measureset {1} iteration {2} wprProfileName {3} tracingMode {4}", _baselineCaptureSeconds, measureSetName, iteration, wprProfileName, tracingMode));
    bool passSucceeded = false;
    // Retry loop: each attempt restarts tracing and the browser from scratch.
    for (int attemptNumber = 0; attemptNumber < _maxAttempts && !passSucceeded; attemptNumber++)
    {
        int scenarioIndex = 0;
        if (attemptNumber > 0)
        {
            Logger.LogWriteLine(" Attempting again...");
        }
        string workloadName = _workloadName;
        if (_enableScenarioTracing)
        {
            // Capturing a trace per each scenario of the workload. Append the scenario name and index to the workloadname for documentation purposes.
            workloadName = _workloadName + "-0-" + _scenarios[0].ScenarioName;
        }
        // Expected whole-workload duration: browser-start pause plus every scenario's
        // budget, plus the inter-scenario pauses when tracing per scenario.
        var workloadDuration = _pauseAfterBrowserStart;
        foreach (WorkloadScenario s in _scenarios)
        {
            workloadDuration += s.Duration;
        }
        if (_enableScenarioTracing)
        {
            workloadDuration += _pauseBetweenScenariosRun * _scenarios.Count;
        }
        // Start tracing
        if (_disableWpr)
        {
            tracingMode = "DISABLED";
        }
        if (!_enableScenarioTracing) // To skip tracing browser start
        {
            elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} {browser} ITERATION {iteration} SCENARIO_NAME {workloadName} WPRPROFILE {wprProfileName} MODE {tracingMode} DURATION {workloadDuration + _e3RefreshDelaySeconds}").Wait();
        }
        if (usingTraceController)
        {
            Logger.LogWriteLine(string.Format(" Pausing {0} seconds after starting the trace session to reduce interference.", _e3RefreshDelaySeconds));
            // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
            // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
            Thread.Sleep(_e3RefreshDelaySeconds * 1000);
        }
        Logger.LogWriteLine(string.Format(" Launching Browser Driver: '{0}'", browser));
        ScenarioEventSourceProvider.EventLog.WorkloadStart(_workloadName, browser, wprProfileName, iteration, attemptNumber);
        using (var driver = RemoteWebDriverExtension.CreateDriverAndMaximize(browser, _clearBrowserCache, _enableVerboseLogging, _browserProfilePath, _extensionsPaths, _port, _hostName, _enableBrowserTracing, _windowMode, _broArgs))
        {
            Logger.LogWriteLine($" Pause {_pauseAfterBrowserStart} seconds after browser start to reduce interference");
            Thread.Sleep(_pauseAfterBrowserStart * 1000);
            string currentScenarioName = "";
            try
            {
                Stopwatch watch = Stopwatch.StartNew();
                bool isFirstScenario = true;
                _timer.SetDriver(driver);
                // Label target for _infiniteLoop mode: after a full pass over the
                // scenarios, control jumps back here to repeat them indefinitely.
                InfiniteLoop:
                foreach (var currentScenario in _scenarios)
                {
                    // Set up scenario environment
                    currentScenario.Scenario.SetUp(driver);
                    if (_enableScenarioTracing)
                    {
                        // Capturing a trace per each scenario of the workload.
                        // Stop and save the current trace session which is for the last scenario.
                        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} {browser}").Wait();
                        // let's just wait a few seconds before starting the next trace
                        Thread.Sleep(_pauseBetweenScenariosRun * 1000);
                        // Append the scenario name and index to the workloadname for documentation purposes.
                        workloadName = _workloadName + "-" + scenarioIndex + "-" + currentScenario.ScenarioName;
                        // Start tracing for the current scenario
                        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.START_BROWSER} {browser} ITERATION {iteration} SCENARIO_NAME {workloadName} WPRPROFILE {wprProfileName} MODE {tracingMode} DURATION {currentScenario.Duration}").Wait();
                    }
                    // Save the name of the current scenarion in case an exception is thrown in which case the local variable 'currentScenario' will be lost
                    currentScenarioName = currentScenario.ScenarioName;
                    _timer.SetScenario(currentScenario.ScenarioName);
                    // We want every scenario to take the same amount of time total, even if there are changes in
                    // how long pages take to load. The biggest reason for this is so that you can measure energy
                    // or power and their ratios will be the same either way.
                    // So start by getting the current time.
                    var startTime = watch.Elapsed;
                    // The first scenario naviagates in the browser's new tab / welcome page.
                    // After that, scenarios open in their own tabs
                    if (!isFirstScenario && currentScenario.Tab == "new")
                    {
                        driver.CreateNewTab();
                    }
                    isFirstScenario = false;
                    Logger.LogWriteLine(string.Format(" Executing - Scenario: {0} Iteration: {1} Attempt: {2} Browser: {3} MeasureSet: {4}", currentScenario.ScenarioName, iteration, attemptNumber, browser, measureSetName));
                    ScenarioEventSourceProvider.EventLog.ScenarioExecutionStart(browser, currentScenario.ScenarioName);
                    CancellationTokenSource ctx = null;
                    Task <List <OpenQA.Selenium.LogEntry> > browserTraceDumpTask = null;
                    // Start polling chromedriver for logs
                    if (_enableBrowserTracing && (browser == "chrome" || browser == "chromium" || browser == "yabro" || browser == "brodefault"))
                    {
                        if (_enableScenarioTracing)
                        {
                            RemoteWebDriverExtension.FlushChromeDriverLogs(driver); // Drop browser traces recorded before scenario run
                        }
                        ctx = new CancellationTokenSource();
                        browserTraceDumpTask = RemoteWebDriverExtension.GetChromeDriverLogs(driver, ctx);
                    }
                    // Here, control is handed to the scenario to navigate, and do whatever it wants
                    currentScenario.Scenario.Run(driver, browser, _logins, _timer);
                    // When we get control back, we sleep for the remaining time for the scenario. This ensures
                    // the total time for a scenario is always the same
                    var runTime = watch.Elapsed.Subtract(startTime);
                    var timeLeft = TimeSpan.FromSeconds(currentScenario.Duration).Subtract(runTime);
                    if (timeLeft < TimeSpan.FromSeconds(0) && !overrideTimeout)
                    {
                        // Of course it's possible we don't get control back until after we were supposed to
                        // continue to the next scenario. In that case, invalidate the run by throwing.
                        Logger.LogWriteLine(string.Format(" !!! Scenario {0} ran longer than expected! The browser ran for {1}s. The timeout for this scenario is {2}s.", currentScenario.ScenarioName, runTime.TotalSeconds, currentScenario.Duration));
                        throw new Exception(string.Format("Scenario {0} ran longer than expected! The browser ran for {1}s. The timeout for this scenario is {2}s.", currentScenario.ScenarioName, runTime.TotalSeconds, currentScenario.Duration));
                    }
                    else if (!overrideTimeout)
                    {
                        Logger.LogWriteLine(string.Format(" Scenario {0} returned in {1} seconds. Sleep for remaining {2} seconds.", currentScenario.ScenarioName, runTime.TotalSeconds, timeLeft.TotalSeconds));
                        driver.Wait(timeLeft.TotalSeconds, "ScenarioWait");
                    }
                    // wait same amount of seconds to avoid overlapping of tracing with Scenario TearDown
                    Thread.Sleep(_pauseBetweenScenariosRun * 1000);
                    // Tear down scenario environment
                    currentScenario.Scenario.TearDown(driver);
                    // wait for TearDown
                    // Thread.Sleep(_pauseBetweenScenariosRun * 1000);
                    // Stop polling chromedriver logs
                    if (ctx != null)
                    {
                        ctx.Cancel();
                        string browserTraceFile = Path.Combine(_etlPath, $"{browser}_{currentScenario.ScenarioName}_{iteration}_trace_DATE_DATE.json");
                        // Blocks until the polling task observes the cancellation and returns its logs.
                        var browserLogs = browserTraceDumpTask.Result;
                        RemoteWebDriverExtension.DumpChromeDriverLogs(browserTraceFile, browserLogs);
                    }
                    SaveBenchmarkResult(currentScenario.Scenario, browser, _etlPath);
                    ScenarioEventSourceProvider.EventLog.ScenarioExecutionStop(browser, currentScenario.ScenarioName);
                    Logger.LogWriteLine(string.Format(" Completed - Scenario: {0} Iteration: {1} Attempt: {2} Browser: {3} MeasureSet: {4}", currentScenario.ScenarioName, iteration, attemptNumber, browser, measureSetName, runTime.TotalSeconds));
                    scenarioIndex++;
                }
                if (_infiniteLoop)
                {
                    // Close every tab except the first, reset focus, and replay the
                    // whole scenario list again (loop never exits on its own).
                    var tabs = driver.WindowHandles;
                    foreach (var tab in tabs)
                    {
                        if (tabs[0] != tab)
                        {
                            driver.SwitchTab(tab);
                            driver.Close();
                        }
                    }
                    driver.SwitchTab(tabs[0]);
                    driver.SwitchTo().DefaultContent();
                    goto InfiniteLoop;
                }
                Logger.LogWriteLine(" Wait 3s before closing browser for finishing all measurements");
                Thread.Sleep(3000);
                driver.CloseBrowser(browser);
                passSucceeded = true;
                Logger.LogWriteLine(string.Format(" SUCCESS! Completed Browser: {0} Iteration: {1} Attempt: {2} MeasureSet: {3}", browser, iteration, attemptNumber, measureSetName));
                ScenarioEventSourceProvider.EventLog.WorkloadStop(_workloadName, browser, wprProfileName, iteration, attemptNumber);
            }
            catch (Exception ex)
            {
                // If something goes wrong and we get an exception halfway through the scenario, we clean up
                // and put everything back into a state where we can start the next iteration.
                // NOTE(review): CANCEL_PASS is not awaited, unlike the other controller
                // messages — confirm this fire-and-forget is intended.
                elevatorClient.SendControllerMessageAsync(Elevator.Commands.CANCEL_PASS);
                try
                {
                    // Attempt to save the page source
                    string pageSourceFileName = string.Format("pageSource_{0}_{1}_{2}_{3}_{4}.html", browser, currentScenarioName, iteration, measureSetName, attemptNumber);
                    pageSourceFileName = Path.Combine(_etlPath, pageSourceFileName);
                    using (StreamWriter sw = new StreamWriter(pageSourceFileName, false))
                    {
                        sw.WriteLine(driver.PageSource);
                    }
                    // Attempt to save a screenshot
                    OpenQA.Selenium.Screenshot screenshot = driver.GetScreenshot();
                    string imageFileName = string.Format("screenshot_{0}_{1}_{2}_{3}_{4}.png", browser, currentScenarioName, iteration, measureSetName, attemptNumber);
                    imageFileName = Path.Combine(_etlPath, imageFileName);
                    screenshot.SaveAsFile(imageFileName, OpenQA.Selenium.ScreenshotImageFormat.Png);
                }
                catch (Exception)
                {
                    // ignore this exception as we were just trying to see if we could get a screenshot and pagesource for the original exception.
                }
                driver.CloseBrowser(browser);
                Logger.LogWriteLine("------ EXCEPTION caught while trying to run scenario! ------------------------------------");
                Logger.LogWriteLine(string.Format(" Iteration: {0}", iteration));
                Logger.LogWriteLine(string.Format(" Measure Set: {0}", measureSetName));
                Logger.LogWriteLine(string.Format(" Browser: {0}", browser));
                Logger.LogWriteLine(string.Format(" Attempt: {0}", attemptNumber));
                Logger.LogWriteLine(string.Format(" Scenario: {0}", currentScenarioName));
                Logger.LogWriteLine(" Exception: " + ex.ToString());
                if (usingTraceController)
                {
                    Logger.LogWriteLine(" Trace has been discarded");
                }
                Logger.LogWriteLine("-------------------------------------------------------");
            }
            finally
            {
                if (usingTraceController)
                {
                    Logger.LogWriteLine(string.Format(" Pausing {0} seconds before stopping the trace session to reduce interference.", _e3RefreshDelaySeconds));
                    // E3 system aggregates energy data at regular intervals. For our test passes we use 10 second intervals. Waiting here for 12 seconds before continuing ensures
                    // that the browser energy data reported by E3 for this run is only for this run and does not bleed into any other runs.
                    Thread.Sleep(_e3RefreshDelaySeconds * 1000);
                }
            }
        }
    }
    if (passSucceeded)
    {
        // Stop tracing
        elevatorClient.SendControllerMessageAsync($"{Elevator.Commands.END_BROWSER} {browser}").Wait();
    }
    else
    {
        CleanupExtensions();
        Logger.LogWriteLine(string.Format("!!! Failed to successfully complete iteration {0} with browser '{1}' after {2} attempts!", iteration, browser, _maxAttempts));
        throw new Exception(string.Format("!!! Failed to successfully complete iteration {0} with browser '{1}' after {2} attempts!", iteration, browser, _maxAttempts));
    }
    return(passSucceeded);
}
/// <summary>
/// Captures screenshots at three scopes — the Alarms &amp; Clock app window, a maximized
/// Notepad window, and the entire desktop — and verifies their relative dimensions.
/// Also demonstrates that taking an app screenshot implicitly brings that window to
/// the foreground, even when another window fully covers it.
/// NOTE(review): SaveAsFile uses relative paths, so the .png files land in the test
/// runner's working directory — confirm that is intended.
/// </summary>
public void GetScreenshot()
{
    WindowsDriver<WindowsElement> notepadSession = null;
    WindowsDriver<WindowsElement> desktopSession = null;
    try
    {
        // Launch and capture a screenshot of a maximized Notepad application. The steps below
        // intentionally use the Notepad application window to fully cover the Alarms & Clock
        // application. This setup demonstrates that capturing an Alarms & Clock screenshot
        // afterward will implicitly bring its window to the foreground.
        notepadSession = Utility.CreateNewSession(CommonTestSettings.NotepadAppId);
        notepadSession.Manage().Window.Maximize();
        // Brief pause so the maximize animation settles before capturing.
        System.Threading.Thread.Sleep(TimeSpan.FromSeconds(1));
        OpenQA.Selenium.Screenshot notepadScreenshot = notepadSession.GetScreenshot();

        // Capture a screenshot of the Alarms & Clock application.
        // This implicitly brings the application window to the foreground.
        OpenQA.Selenium.Screenshot alarmsClockScreenshot = session.GetScreenshot();

        // Save the application screenshot locally on the machine running the test.
        alarmsClockScreenshot.SaveAsFile(@"ScreenshotAlarmsClockApplication.png", ImageFormat.Png);

        // Capture the entire desktop using the Desktop session.
        desktopSession = Utility.CreateNewSession(CommonTestSettings.DesktopAppId);
        OpenQA.Selenium.Screenshot desktopScreenshot = desktopSession.GetScreenshot();

        // Save the desktop screenshot locally on the machine running the test.
        desktopScreenshot.SaveAsFile(@"ScreenshotDesktop.png", ImageFormat.Png);

        using (MemoryStream msScreenshot1 = new MemoryStream(alarmsClockScreenshot.AsByteArray))
        using (MemoryStream msScreenshot2 = new MemoryStream(notepadScreenshot.AsByteArray))
        using (MemoryStream msScreenshot3 = new MemoryStream(desktopScreenshot.AsByteArray))
        {
            // Verify that the Alarms & Clock application screenshot has a valid size
            // (it should match the application window's reported dimensions).
            Image screenshotImage1 = Image.FromStream(msScreenshot1);
            Assert.AreEqual(session.Manage().Window.Size.Height, screenshotImage1.Height);
            Assert.AreEqual(session.Manage().Window.Size.Width, screenshotImage1.Width);

            // Verify that the maximized Notepad application screenshot has a valid size.
            Image screenshotImage2 = Image.FromStream(msScreenshot2);
            Assert.AreEqual(notepadSession.Manage().Window.Size.Height, screenshotImage2.Height);
            Assert.AreEqual(notepadSession.Manage().Window.Size.Width, screenshotImage2.Width);

            // Verify that the application screenshot is smaller in size compared to the
            // entire desktop.
            // BUG FIX: the original asserted against screenshotImage2 (the Notepad capture),
            // which contradicted this check's intent and left the desktop image
            // (screenshotImage3) entirely unused. The desktop capture is the correct comparand.
            Image screenshotImage3 = Image.FromStream(msScreenshot3);
            Assert.IsTrue(screenshotImage3.Height >= screenshotImage1.Height);
            Assert.IsTrue(screenshotImage3.Width >= screenshotImage1.Width);
        }
    }
    finally
    {
        // Always tear down the extra sessions so no orphaned app windows or
        // WinAppDriver sessions survive a failed assertion.
        if (notepadSession != null)
        {
            notepadSession.Quit();
        }

        if (desktopSession != null)
        {
            desktopSession.Quit();
        }
    }
}