Merge pull request #875 from seleniumbase/combine-dashboard-with-parallel-tests

Allow parallel tests with "-n=NUM" when using "--dashboard" mode, and more
Michael Mintz 2021-04-16 03:06:25 -04:00 committed by GitHub
commit 24e18abb0f
14 changed files with 172 additions and 45 deletions

.gitignore
View File

@@ -99,6 +99,9 @@ report.xml
# Dashboard
dashboard.html
dashboard.json
dash_pie.json
dashboard.lock
# Allure Reports / Results
allure_report
@@ -134,6 +137,7 @@ docs/*/*/*/*.md
# Other
selenium-server-standalone.jar
proxy.zip
proxy.lock
verbose_hub_server.dat
verbose_node_server.dat
ip_of_grid_hub.dat

View File

@@ -7,7 +7,7 @@ docutils==0.17
Jinja2==2.11.3
readme-renderer==29.0
pymdown-extensions==8.1.1
importlib-metadata==3.10.0;python_version>="3.6"
importlib-metadata==3.10.1;python_version>="3.6"
lunr==0.5.8
mkdocs==1.1.2
mkdocs-material==7.1.1

View File

@@ -53,6 +53,7 @@ except (ImportError, ValueError):
sb.use_auto_ext = False
sb.no_sandbox = False
sb.disable_gpu = False
sb._multithreaded = False
sb._reuse_session = False
sb._crumbs = False
sb.visual_baseline = False

View File

@@ -9,7 +9,7 @@
--------
<b>SeleniumBase</b> supports 17 different syntax formats for structuring tests. (<i>The first 6 are the most common.</i>)
<b>SeleniumBase</b> supports 17 different syntax formats (<i>design patterns</i>) for structuring tests. (<i>The first 6 are the most common.</i>)
<h3><img src="https://seleniumbase.io/img/green_logo.png" title="SeleniumBase" width="32" /> 1. <code>BaseCase</code> direct inheritance</h3>

View File

@@ -28,6 +28,8 @@ msedge-selenium-tools==3.141.3
more-itertools==5.0.0;python_version<"3.5"
more-itertools==8.7.0;python_version>="3.5"
cssselect==1.1.0
filelock==3.0.12
fasteners==0.16
pluggy==0.13.1
py==1.8.1;python_version<"3.5"
py==1.10.0;python_version>="3.5"
@@ -91,7 +93,7 @@ pdfminer.six==20201018;python_version>="3.5"
coverage==5.5
pytest-cov==2.11.1
flake8==3.7.9;python_version<"3.5"
flake8==3.9.0;python_version>="3.5"
flake8==3.9.1;python_version>="3.5"
pyflakes==2.1.1;python_version<"3.5"
pyflakes==2.3.1;python_version>="3.5"
pycodestyle==2.5.0;python_version<"3.5"

View File

@@ -1,2 +1,2 @@
# seleniumbase package
__version__ = "1.60.0"
__version__ = "1.61.0"

View File

@@ -253,6 +253,9 @@ def main():
data.append("report.html")
data.append("report.xml")
data.append("dashboard.html")
data.append("dashboard.json")
data.append("dash_pie.json")
data.append("dashboard.lock")
data.append("allure_report")
data.append("allure-report")
data.append("allure_results")
@@ -265,6 +268,7 @@
data.append("visual_baseline")
data.append("selenium-server-standalone.jar")
data.append("proxy.zip")
data.append("proxy.lock")
data.append("verbose_hub_server.dat")
data.append("verbose_node_server.dat")
data.append("ip_of_grid_hub.dat")

View File

@@ -2,7 +2,6 @@ import logging
import os
import re
import sys
import time
import urllib3
import warnings
from selenium import webdriver
@@ -29,6 +28,7 @@ EXTENSIONS_DIR = os.path.dirname(os.path.realpath(extensions.__file__))
DISABLE_CSP_ZIP_PATH = "%s/%s" % (EXTENSIONS_DIR, "disable_csp.zip")
PROXY_ZIP_PATH = proxy_helper.PROXY_ZIP_PATH
PROXY_ZIP_PATH_2 = proxy_helper.PROXY_ZIP_PATH_2
PROXY_ZIP_LOCK = proxy_helper.PROXY_ZIP_LOCK
PLATFORM = sys.platform
IS_WINDOWS = False
LOCAL_CHROMEDRIVER = None
@@ -93,21 +93,18 @@ def _add_chrome_proxy_extension(
""" Implementation of https://stackoverflow.com/a/35293284 for
https://stackoverflow.com/questions/12848327/
(Run Selenium on a proxy server that requires authentication.) """
import random
arg_join = " ".join(sys.argv)
if not ("-n" in sys.argv or "-n=" in arg_join or arg_join == "-c"):
if not ("-n" in sys.argv or " -n=" in arg_join or arg_join == "-c"):
# Single-threaded
proxy_helper.create_proxy_zip(proxy_string, proxy_user, proxy_pass)
else:
# Pytest multi-threaded test
import threading
lock = threading.Lock()
with lock:
time.sleep(random.uniform(0.02, 0.15))
import fasteners
proxy_zip_lock = fasteners.InterProcessLock(PROXY_ZIP_LOCK)
with proxy_zip_lock:
if not os.path.exists(PROXY_ZIP_PATH):
proxy_helper.create_proxy_zip(
proxy_string, proxy_user, proxy_pass)
time.sleep(random.uniform(0.1, 0.2))
proxy_zip = PROXY_ZIP_PATH
if not os.path.exists(PROXY_ZIP_PATH):
# Handle "Permission denied" on the default proxy.zip path

View File

@@ -7,6 +7,7 @@ DRIVER_DIR = os.path.dirname(os.path.realpath(drivers.__file__))
PROXY_ZIP_PATH = "%s/%s" % (DRIVER_DIR, "proxy.zip")
DOWNLOADS_DIR = constants.Files.DOWNLOADS_FOLDER
PROXY_ZIP_PATH_2 = "%s/%s" % (DOWNLOADS_DIR, "proxy.zip")
PROXY_ZIP_LOCK = "%s/%s" % (DOWNLOADS_DIR, "proxy.lock")
def create_proxy_zip(proxy_string, proxy_user, proxy_pass):
@@ -90,5 +91,7 @@ def remove_proxy_zip_if_present():
os.remove(PROXY_ZIP_PATH)
elif os.path.exists(PROXY_ZIP_PATH_2):
os.remove(PROXY_ZIP_PATH_2)
if os.path.exists(PROXY_ZIP_LOCK):
os.remove(PROXY_ZIP_LOCK)
except Exception:
pass

View File

@@ -80,6 +80,7 @@ class BaseCase(unittest.TestCase):
self.__device_width = None
self.__device_height = None
self.__device_pixel_ratio = None
self.__driver_browser_map = {}
# Requires self._* instead of self.__* for external class use
self._language = "English"
self._presentation_slides = {}
@ -2134,8 +2135,10 @@ class BaseCase(unittest.TestCase):
device_height=d_height,
device_pixel_ratio=d_p_r)
self._drivers_list.append(new_driver)
self.__driver_browser_map[new_driver] = browser_name
if switch_to:
self.driver = new_driver
self.browser = browser_name
if self.headless:
# Make sure the invisible browser window is big enough
width = settings.HEADLESS_START_WIDTH
@@ -2209,11 +2212,15 @@
""" Sets self.driver to the specified driver.
You may need this if using self.get_new_driver() in your code. """
self.driver = driver
if self.driver in self.__driver_browser_map:
self.browser = self.__driver_browser_map[self.driver]
def switch_to_default_driver(self):
""" Sets self.driver to the default/original driver. """
self.__check_scope()
self.driver = self._default_driver
if self.driver in self.__driver_browser_map:
self.browser = self.__driver_browser_map[self.driver]
def save_screenshot(self, name, folder=None):
""" The screenshot will be in PNG format. """
@ -6497,7 +6504,7 @@ class BaseCase(unittest.TestCase):
self.check_window(name="github_page", level=2)
self.check_window(name="wikipedia_page", level=3)
"""
self.__check_scope()
self.wait_for_ready_state_complete()
if level == "0":
level = 0
if level == "1":
@@ -6510,11 +6517,10 @@
raise Exception('Parameter "level" must be set to 0, 1, 2, or 3!')
if self.demo_mode:
raise Exception(
"WARNING: Using Demo Mode will break layout tests "
"that use the check_window() method due to custom "
"HTML edits being made on the page!\n"
"Please rerun without using Demo Mode!")
message = (
"WARNING: Using check_window() from Demo Mode may lead "
"to unexpected results caused by Demo Mode HTML changes.")
logging.info(message)
module = self.__class__.__module__
if '.' in module and len(module.split('.')[-1]) > 1:
@ -7263,15 +7269,20 @@ class BaseCase(unittest.TestCase):
self.guest_mode = sb_config.guest_mode
self.devtools = sb_config.devtools
self.remote_debug = sb_config.remote_debug
self._multithreaded = sb_config._multithreaded
self._reuse_session = sb_config.reuse_session
self._crumbs = sb_config.crumbs
self.dashboard = sb_config.dashboard
self._dash_initialized = sb_config._dashboard_initialized
if self.dashboard and self._multithreaded:
import fasteners
self.dash_lock = fasteners.InterProcessLock(
constants.Dashboard.LOCKFILE)
self.swiftshader = sb_config.swiftshader
self.user_data_dir = sb_config.user_data_dir
self.extension_zip = sb_config.extension_zip
self.extension_dir = sb_config.extension_dir
self.maximize_option = sb_config.maximize_option
self._reuse_session = sb_config.reuse_session
self._crumbs = sb_config.crumbs
self.save_screenshot_after_test = sb_config.save_screenshot
self.visual_baseline = sb_config.visual_baseline
self.timeout_multiplier = sb_config.timeout_multiplier
@@ -7385,13 +7396,21 @@
# Dashboard pre-processing:
if self.dashboard:
sb_config._sbase_detected = True
sb_config._only_unittest = False
if not self._dash_initialized:
sb_config._dashboard_initialized = True
if self._multithreaded:
with self.dash_lock:
sb_config._sbase_detected = True
sb_config._only_unittest = False
if not self._dash_initialized:
sb_config._dashboard_initialized = True
self._dash_initialized = True
self.__process_dashboard(False, init=True)
else:
sb_config._sbase_detected = True
self._dash_initialized = True
self.__process_dashboard(False, init=True)
sb_config._only_unittest = False
if not self._dash_initialized:
sb_config._dashboard_initialized = True
self._dash_initialized = True
self.__process_dashboard(False, init=True)
has_url = False
if self._reuse_session:
@@ -7690,6 +7709,23 @@
def __process_dashboard(self, has_exception, init=False):
''' SeleniumBase Dashboard Processing '''
existing_res = sb_config._results # Used by multithreaded tests
if self._multithreaded:
abs_path = os.path.abspath('.')
dash_json_loc = constants.Dashboard.DASH_JSON
dash_jsonpath = os.path.join(abs_path, dash_json_loc)
if not init and os.path.exists(dash_jsonpath):
with open(dash_jsonpath, 'r') as f:
dash_json = f.read().strip()
dash_data, d_id, dash_runtimes, d_stats = json.loads(dash_json)
num_passed, num_failed, num_skipped, num_untested = d_stats
sb_config._results = dash_data
sb_config._display_id = d_id
sb_config._duration = dash_runtimes
sb_config.item_count_passed = num_passed
sb_config.item_count_failed = num_failed
sb_config.item_count_skipped = num_skipped
sb_config.item_count_untested = num_untested
if len(sb_config._extra_dash_entries) > 0:
# First take care of existing entries from non-SeleniumBase tests
for test_id in sb_config._extra_dash_entries:
@@ -7735,6 +7771,11 @@
sb_config.item_count_skipped += 1
sb_config.item_count_untested -= 1
sb_config._results[test_id] = "Skipped"
elif self._multithreaded and test_id in existing_res.keys() and (
existing_res[test_id] == "Skipped"):
sb_config.item_count_skipped += 1
sb_config.item_count_untested -= 1
sb_config._results[test_id] = "Skipped"
elif has_exception:
# pytest-rerunfailures may cause duplicate results
if test_id not in sb_config._results.keys() or (
@@ -7772,6 +7813,14 @@
if sb_config._using_html_report:
# Add the pie chart to the pytest html report
sb_config._saved_dashboard_pie = self.extract_chart()
if self._multithreaded:
abs_path = os.path.abspath('.')
dash_pie = json.dumps(sb_config._saved_dashboard_pie)
dash_pie_loc = constants.Dashboard.DASH_PIE
pie_path = os.path.join(abs_path, dash_pie_loc)
pie_file = codecs.open(pie_path, "w+", encoding="utf-8")
pie_file.writelines(dash_pie)
pie_file.close()
head = (
'<head><meta charset="utf-8" />'
'<meta property="og:image" '
@@ -7881,6 +7930,17 @@
out_file.writelines(the_html)
out_file.close()
time.sleep(0.05) # Add time for dashboard server to process updates
if self._multithreaded:
d_stats = (num_passed, num_failed, num_skipped, num_untested)
_results = sb_config._results
_display_id = sb_config._display_id
_duration = sb_config._duration
dash_json = json.dumps((_results, _display_id, _duration, d_stats))
dash_json_loc = constants.Dashboard.DASH_JSON
dash_jsonpath = os.path.join(abs_path, dash_json_loc)
dash_json_file = codecs.open(dash_jsonpath, "w+", encoding="utf-8")
dash_json_file.writelines(dash_json)
dash_json_file.close()
def has_exception(self):
""" (This method should ONLY be used in custom tearDown() methods.)
@@ -8025,7 +8085,11 @@
test_logpath, self.driver,
self.__last_page_source)
if self.dashboard:
self.__process_dashboard(has_exception)
if self._multithreaded:
with self.dash_lock:
self.__process_dashboard(has_exception)
else:
self.__process_dashboard(has_exception)
# (Pytest) Finally close all open browser windows
self.__quit_all_drivers()
if self.headless:
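
The tearDown() change above wraps every dashboard write in that same lock, so two workers finishing at the same moment cannot interleave partial writes to dashboard.html. The shape of the guard, reduced to a helper (the helper itself is illustrative; single-threaded runs skip the lock entirely, as in the diff):

import fasteners

def process_dashboard_safely(process_fn, multithreaded, lockfile="dashboard.lock"):
    # process_fn stands in for the dashboard-processing call.
    if multithreaded:
        with fasteners.InterProcessLock(lockfile):
            process_fn()
    else:
        process_fn()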

View File

@@ -29,10 +29,13 @@ class Charts:
class Dashboard:
TITLE = "SeleniumBase Test Results Dashboard"
STYLE_CSS = 'https://seleniumbase.io/cdn/css/pytest_style.css'
STYLE_CSS = "https://seleniumbase.io/cdn/css/pytest_style.css"
META_REFRESH_HTML = '<meta http-equiv="refresh" content="12">'
# LIVE_JS = 'https://livejs.com/live.js#html'
LIVE_JS = 'https://seleniumbase.io/cdn/js/live.js#html'
LIVE_JS = "https://seleniumbase.io/cdn/js/live.js#html"
LOCKFILE = Files.DOWNLOADS_FOLDER + "/dashboard.lock"
DASH_JSON = Files.DOWNLOADS_FOLDER + "/dashboard.json"
DASH_PIE = Files.DOWNLOADS_FOLDER + "/dash_pie.json"
class SavedCookies:
@@ -60,7 +63,7 @@ class Warnings:
class JQuery:
VER = "3.5.1"
VER = "3.6.0"
MIN_JS = (
"https://cdnjs.cloudflare.com/ajax/libs/jquery/%s/jquery.min.js" % VER)
# MIN_JS = (
@@ -95,7 +98,7 @@ class Messenger:
class Underscore:
VER = "1.12.0"
VER = "1.12.1"
MIN_JS = ("https://cdnjs.cloudflare.com/ajax/libs/"
"underscore.js/%s/underscore-min.js" % VER)
@@ -148,7 +151,7 @@ class Reveal:
class HighCharts:
VER = "8.2.2"
VER = "9.0.1"
HC_CSS = ("https://code.highcharts.com/%s/css/highcharts.css" % VER)
HC_JS = ("https://code.highcharts.com/%s/highcharts.js" % VER)
EXPORTING_JS = ("https://code.highcharts.com/"

View File

@@ -582,13 +582,18 @@ def pytest_addoption(parser):
"\n It's not thread-safe for WebDriver processes! "
"\n Use --time-limit=s from SeleniumBase instead!\n")
# The SeleniumBase Dashboard does not yet support multi-threaded tests.
if "--dashboard" in sys_argv:
arg_join = " ".join(sys_argv)
if ("-n" in sys_argv) or ("-n=" in arg_join):
raise Exception(
"\n\n Multi-threading is not yet supported using --dashboard"
"\n (You can speed up tests using --reuse-session / --rs)\n")
# Dashboard Mode does not support tests using forked subprocesses.
if "--forked" in sys_argv and "--dashboard" in sys_argv:
raise Exception(
'\n\n Dashboard Mode does NOT support forked subprocesses!'
'\n (*** DO NOT combine "--forked" with "--dashboard"! ***)\n')
# Reuse-Session Mode does not support tests using forked subprocesses.
if "--forked" in sys_argv and (
"--rs" in sys_argv or "--reuse-session" in sys_argv):
raise Exception(
'\n\n Reuse-Session Mode does NOT support forked subprocesses!'
'\n (DO NOT combine "--forked" with "--rs"/"--reuse-session"!)\n')
# As a shortcut, you can use "--edge" instead of "--browser=edge", etc,
# but you can only specify one default browser for tests. (Default: chrome)
@@ -754,9 +759,9 @@ def pytest_configure(config):
sb_config._html_report_name = None # The name of the pytest html report
arg_join = " ".join(sys.argv)
if ("-n" in sys.argv) or ("-n=" in arg_join):
if ("-n" in sys.argv) or (" -n=" in arg_join) or ("-c" in sys.argv):
sb_config._multithreaded = True
if ("--html" in sys.argv or "--html=" in arg_join):
if ("--html" in sys.argv or " --html=" in arg_join):
sb_config._using_html_report = True
sb_config._html_report_name = config.getoption("htmlpath")
if sb_config.dashboard:
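
pytest_configure() now flags the run as multithreaded purely from the command line: any -n form, or the -c flag that (per the comment later in this diff) pytest-xdist worker processes carry in their argv. A hedged, standalone version of that check:

import sys

def looks_multithreaded(argv=None):
    argv = list(sys.argv if argv is None else argv)
    arg_join = " ".join(argv)
    # "-n" / "-n=NUM" come from pytest-xdist; "-c" shows up in worker argv.
    return ("-n" in argv) or (" -n=" in arg_join) or ("-c" in argv)
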
@@ -874,6 +879,16 @@
def pytest_terminal_summary(terminalreporter):
latest_logs_dir = os.getcwd() + "/latest_logs/"
if sb_config._multithreaded:
if os.path.exists(latest_logs_dir) and os.listdir(latest_logs_dir):
sb_config._has_exception = True
if sb_config.dashboard:
abs_path = os.path.abspath('.')
dash_lock = constants.Dashboard.LOCKFILE
dash_lock_path = os.path.join(abs_path, dash_lock)
if os.path.exists(dash_lock_path):
sb_config._only_unittest = False
if sb_config._has_exception and (
sb_config.dashboard and not sb_config._only_unittest):
# Print link a second time because the first one may be off-screen
@@ -882,7 +897,6 @@ def pytest_terminal_summary(terminalreporter):
"-", "Dashboard: %s" % dashboard_file)
if sb_config._has_exception or sb_config.save_screenshot:
# Log files are generated during test failures and Screenshot Mode
latest_logs_dir = os.getcwd() + "/latest_logs/"
terminalreporter.write_sep(
"-", "LogPath: %s" % latest_logs_dir)
@@ -903,10 +917,19 @@ def pytest_unconfigure():
if hasattr(sb_config, 'log_path'):
log_helper.archive_logs_if_set(
sb_config.log_path, sb_config.archive_logs)
# Dashboard post-processing: Disable time-based refresh and stamp complete
if sb_config._multithreaded and sb_config.dashboard:
abs_path = os.path.abspath('.')
dash_lock = constants.Dashboard.LOCKFILE
dash_lock_path = os.path.join(abs_path, dash_lock)
if os.path.exists(dash_lock_path):
sb_config._only_unittest = False
if hasattr(sb_config, 'dashboard') and (
sb_config.dashboard and not sb_config._only_unittest):
if sb_config._multithreaded:
import fasteners
dash_lock = fasteners.InterProcessLock(
constants.Dashboard.LOCKFILE)
stamp = ""
if sb_config._dash_is_html_report:
# (If the Dashboard URL is the same as the HTML Report URL:)
@@ -924,7 +947,11 @@ def pytest_unconfigure():
swap_with_3 = '<td class="col-result">Unreported</td>'
find_it_4 = 'href="https://seleniumbase.io/img/dash_pie.png"'
swap_with_4 = 'href="https://seleniumbase.io/img/dash_pie_2.png"'
find_it_5 = 'content="https://seleniumbase.io/img/dash_pie.png"'
swap_with_5 = 'content="https://seleniumbase.io/img/dash_pie_2.png"'
try:
if sb_config._multithreaded:
dash_lock.acquire()
abs_path = os.path.abspath('.')
dashboard_path = os.path.join(abs_path, "dashboard.html")
# Part 1: Finalizing the dashboard / integrating html report
@@ -932,11 +959,23 @@ def pytest_unconfigure():
the_html_d = None
with open(dashboard_path, 'r', encoding='utf-8') as f:
the_html_d = f.read()
if sb_config._multithreaded and "-c" in sys.argv:
# Threads have "-c" in sys.argv, except for the last
raise Exception('Break out of "try" block.')
if sb_config._multithreaded:
dash_pie_loc = constants.Dashboard.DASH_PIE
pie_path = os.path.join(abs_path, dash_pie_loc)
if os.path.exists(pie_path):
import json
with open(pie_path, 'r') as f:
dash_pie = f.read().strip()
sb_config._saved_dashboard_pie = json.loads(dash_pie)
# If the test run doesn't complete by itself, stop refresh
the_html_d = the_html_d.replace(find_it, swap_with)
the_html_d = the_html_d.replace(find_it_2, swap_with_2)
the_html_d = the_html_d.replace(find_it_3, swap_with_3)
the_html_d = the_html_d.replace(find_it_4, swap_with_4)
the_html_d = the_html_d.replace(find_it_5, swap_with_5)
the_html_d += stamp
if sb_config._dash_is_html_report and (
sb_config._saved_dashboard_pie):
@@ -985,8 +1024,13 @@ def pytest_unconfigure():
the_html_r += sb_config._dash_final_summary
with open(html_report_path, "w", encoding='utf-8') as f:
f.write(the_html_r) # Finalize the HTML report
except KeyboardInterrupt:
pass
except Exception:
pass
finally:
if sb_config._multithreaded:
dash_lock.release()
@pytest.fixture()
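
pytest_unconfigure() finalizes the dashboard with an explicit acquire()/release() pair around a try/finally instead of a with-block, so the lock is freed even when the HTML patching bails out early. A minimal sketch of that shape, assuming the same fasteners lock and the refresh tag that constants.Dashboard defines (the function and paths are illustrative):

import fasteners

META_REFRESH = '<meta http-equiv="refresh" content="12">'  # the tag the live dashboard injects

def finalize_dashboard(dashboard_path, multithreaded, lockfile="dashboard.lock"):
    lock = fasteners.InterProcessLock(lockfile)
    acquired = False
    try:
        if multithreaded:
            acquired = lock.acquire()  # serialize the final rewrite across workers
        with open(dashboard_path, "r", encoding="utf-8") as f:
            html = f.read()
        # Stop the time-based refresh once the run is over.
        with open(dashboard_path, "w", encoding="utf-8") as f:
            f.write(html.replace(META_REFRESH, ""))
    except Exception:
        pass  # like the plugin, never let dashboard cleanup fail the run
    finally:
        if acquired:
            lock.release()
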
@@ -1037,6 +1081,8 @@ def pytest_runtest_makereport(item, call):
pytest_html = item.config.pluginmanager.getplugin('html')
outcome = yield
report = outcome.get_result()
if sb_config._multithreaded:
sb_config._using_html_report = True # For Dashboard use
if pytest_html and report.when == 'call' and (
hasattr(sb_config, 'dashboard')):
if sb_config.dashboard and not sb_config._sbase_detected:

View File

@@ -469,6 +469,7 @@ class SeleniumBrowser(Plugin):
test.test.timeout_multiplier = self.options.timeout_multiplier
test.test.use_grid = False
test.test.dashboard = False
test.test._multithreaded = False
test.test._reuse_session = False
if test.test.servername != "localhost":
# Use Selenium Grid (Use --server="127.0.0.1" for localhost Grid)

View File

@@ -37,7 +37,7 @@ if sys.argv[-1] == 'publish':
'>>> Confirm release PUBLISH to PyPI? (yes/no): ')).lower().strip()
if reply == 'yes':
print("\n*** Checking code health with flake8:\n")
os.system("python -m pip install 'flake8==3.9.0'")
os.system("python -m pip install 'flake8==3.9.1'")
flake8_status = os.system("flake8 --exclude=temp")
if flake8_status != 0:
print("\nWARNING! Fix flake8 issues before publishing to PyPI!\n")
@@ -134,6 +134,8 @@
'more-itertools==5.0.0;python_version<"3.5"',
'more-itertools==8.7.0;python_version>="3.5"',
'cssselect==1.1.0',
'filelock==3.0.12',
'fasteners==0.16',
'pluggy==0.13.1',
'py==1.8.1;python_version<"3.5"',
'py==1.10.0;python_version>="3.5"',
@@ -200,7 +202,7 @@ setup(
# pip install -e .[flake]
'flake': [
'flake8==3.7.9;python_version<"3.5"',
'flake8==3.9.0;python_version>="3.5"',
'flake8==3.9.1;python_version>="3.5"',
'pyflakes==2.1.1;python_version<"3.5"',
'pyflakes==2.3.1;python_version>="3.5"',
'pycodestyle==2.5.0;python_version<"3.5"',