Created: February 6, 2019 17:11
Save stephendonner/089e9cfc97670517abedf050531b32f6 to your computer and use it in GitHub Desktop.
Stephens-MacBook-Pro:wptagent stephendonner$ black --diff . > diff.txt
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
--- internal/base_browser.py 2018-10-25 17:07:28.857131 +0000 | |
+++ internal/base_browser.py 2019-02-06 17:08:25.794734 +0000 | |
@@ -5,12 +5,15 @@ | |
import os | |
class BaseBrowser(object): | |
"""Browser base""" | |
+ | |
def __init__(self): | |
- self.support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
+ self.support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
def execute_js(self, script): | |
"""Stub to be overridden""" | |
return None | |
--- internal/browsers.py 2018-10-25 17:07:28.858559 +0000 | |
+++ internal/browsers.py 2019-02-06 17:08:25.966987 +0000 | |
@@ -6,21 +6,24 @@ | |
import os | |
class Browsers(object): | |
"""Controller for handling several browsers""" | |
+ | |
def __init__(self, options, browsers, adb, ios): | |
import ujson as json | |
+ | |
self.options = options | |
self.browsers = None | |
if browsers is not None: | |
self.browsers = {k.lower(): v for k, v in browsers.items()} | |
self.adb = adb | |
self.ios = ios | |
- android_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'android_browsers.json') | |
- with open(android_file, 'rb') as f_in: | |
+ android_file = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "android_browsers.json" | |
+ ) | |
+ with open(android_file, "rb") as f_in: | |
self.android_browsers = {k.lower(): v for k, v in json.load(f_in).items()} | |
def is_ready(self): | |
"""Check to see if the configured browsers are ready to go""" | |
ready = True | |
@@ -28,64 +31,81 @@ | |
ready = self.adb.is_device_ready() | |
elif self.options.iOS and self.ios is not None: | |
ready = self.ios.is_device_ready() | |
else: | |
for browser in self.browsers: | |
- if 'exe' in self.browsers[browser]: | |
- exe = self.browsers[browser]['exe'] | |
+ if "exe" in self.browsers[browser]: | |
+ exe = self.browsers[browser]["exe"] | |
if not os.path.isfile(exe): | |
- logging.critical("Browser executable is missing for %s: '%s'", browser, exe) | |
+ logging.critical( | |
+ "Browser executable is missing for %s: '%s'", browser, exe | |
+ ) | |
ready = False | |
return ready | |
def get_browser(self, name, job): | |
"""Return an instance of the browser logic""" | |
browser = None | |
name = name.lower() | |
- if name.startswith('ie '): | |
- name = 'ie' | |
+ if name.startswith("ie "): | |
+ name = "ie" | |
if self.options.android: | |
- if 'customBrowser_package' in job: | |
+ if "customBrowser_package" in job: | |
name = "chrome" | |
- separator = name.rfind('-') | |
+ separator = name.rfind("-") | |
if separator >= 0: | |
- name = name[separator + 1:].strip() | |
+ name = name[separator + 1 :].strip() | |
mode = None | |
- separator = name.find('(') | |
+ separator = name.find("(") | |
if separator >= 0: | |
end = name.find(")", separator) | |
if end >= 0: | |
- mode = name[separator + 1:end].strip() | |
+ mode = name[separator + 1 : end].strip() | |
name = name[:separator].strip() | |
if name in self.android_browsers: | |
config = self.android_browsers[name] | |
- config['all'] = self.android_browsers | |
+ config["all"] = self.android_browsers | |
if mode is not None: | |
- config['mode'] = mode | |
- if config['type'] == 'chrome': | |
+ config["mode"] = mode | |
+ if config["type"] == "chrome": | |
from .chrome_android import ChromeAndroid | |
+ | |
browser = ChromeAndroid(self.adb, config, self.options, job) | |
- if config['type'] == 'blackbox': | |
+ if config["type"] == "blackbox": | |
from .blackbox_android import BlackBoxAndroid | |
+ | |
browser = BlackBoxAndroid(self.adb, config, self.options, job) | |
elif self.options.iOS and self.ios is not None: | |
from .safari_ios import iWptBrowser | |
+ | |
browser = iWptBrowser(self.ios, self.options, job) | |
- elif 'type' in job and job['type'] == 'traceroute': | |
+ elif "type" in job and job["type"] == "traceroute": | |
from .traceroute import Traceroute | |
+ | |
browser = Traceroute(self.options, job) | |
- elif name in self.browsers and 'exe' in self.browsers[name]: | |
- job['browser_info'] = self.browsers[name] | |
- if 'type' in self.browsers[name] and self.browsers[name]['type'] == 'Firefox': | |
+ elif name in self.browsers and "exe" in self.browsers[name]: | |
+ job["browser_info"] = self.browsers[name] | |
+ if ( | |
+ "type" in self.browsers[name] | |
+ and self.browsers[name]["type"] == "Firefox" | |
+ ): | |
from .firefox import Firefox | |
- browser = Firefox(self.browsers[name]['exe'], self.options, job) | |
- elif 'type' in self.browsers[name] and self.browsers[name]['type'] == 'Edge': | |
+ | |
+ browser = Firefox(self.browsers[name]["exe"], self.options, job) | |
+ elif ( | |
+ "type" in self.browsers[name] and self.browsers[name]["type"] == "Edge" | |
+ ): | |
from .microsoft_edge import Edge | |
- browser = Edge(self.browsers[name]['exe'], self.options, job) | |
- elif 'type' in self.browsers[name] and self.browsers[name]['type'] == 'IE': | |
+ | |
+ browser = Edge(self.browsers[name]["exe"], self.options, job) | |
+ elif "type" in self.browsers[name] and self.browsers[name]["type"] == "IE": | |
from .internet_explorer import InternetExplorer | |
- browser = InternetExplorer(self.browsers[name]['exe'], self.options, job) | |
+ | |
+ browser = InternetExplorer( | |
+ self.browsers[name]["exe"], self.options, job | |
+ ) | |
else: | |
from .chrome_desktop import ChromeDesktop | |
- browser = ChromeDesktop(self.browsers[name]['exe'], self.options, job) | |
+ | |
+ browser = ChromeDesktop(self.browsers[name]["exe"], self.options, job) | |
return browser | |
--- internal/chrome_desktop.py 2018-10-25 17:07:35.746345 +0000 | |
+++ internal/chrome_desktop.py 2019-02-06 17:08:26.104294 +0000 | |
@@ -11,108 +11,116 @@ | |
import time | |
from .desktop_browser import DesktopBrowser | |
from .devtools_browser import DevtoolsBrowser | |
CHROME_COMMAND_LINE_OPTIONS = [ | |
- '--disable-background-networking', | |
- '--no-default-browser-check', | |
- '--no-first-run', | |
- '--process-per-tab', | |
- '--new-window', | |
- '--disable-infobars', | |
- '--disable-translate', | |
- '--disable-notifications', | |
- '--disable-desktop-notifications', | |
- '--disable-save-password-bubble', | |
- '--allow-running-insecure-content', | |
- '--disable-component-update', | |
- '--disable-background-downloads', | |
- '--disable-add-to-shelf', | |
- '--disable-client-side-phishing-detection', | |
- '--disable-datasaver-prompt', | |
- '--disable-default-apps', | |
- '--disable-device-discovery-notifications', | |
- '--disable-domain-reliability', | |
- '--safebrowsing-disable-auto-update', | |
- '--disable-background-timer-throttling', | |
- '--disable-browser-side-navigation', | |
- '--net-log-capture-mode=IncludeCookiesAndCredentials' | |
+ "--disable-background-networking", | |
+ "--no-default-browser-check", | |
+ "--no-first-run", | |
+ "--process-per-tab", | |
+ "--new-window", | |
+ "--disable-infobars", | |
+ "--disable-translate", | |
+ "--disable-notifications", | |
+ "--disable-desktop-notifications", | |
+ "--disable-save-password-bubble", | |
+ "--allow-running-insecure-content", | |
+ "--disable-component-update", | |
+ "--disable-background-downloads", | |
+ "--disable-add-to-shelf", | |
+ "--disable-client-side-phishing-detection", | |
+ "--disable-datasaver-prompt", | |
+ "--disable-default-apps", | |
+ "--disable-device-discovery-notifications", | |
+ "--disable-domain-reliability", | |
+ "--safebrowsing-disable-auto-update", | |
+ "--disable-background-timer-throttling", | |
+ "--disable-browser-side-navigation", | |
+ "--net-log-capture-mode=IncludeCookiesAndCredentials", | |
] | |
HOST_RULES = [ | |
'"MAP cache.pack.google.com 127.0.0.1"', | |
- '"MAP clients1.google.com 127.0.0.1"' | |
+ '"MAP clients1.google.com 127.0.0.1"', | |
] | |
+ | |
class ChromeDesktop(DesktopBrowser, DevtoolsBrowser): | |
"""Desktop Chrome""" | |
+ | |
def __init__(self, path, options, job): | |
self.options = options | |
DesktopBrowser.__init__(self, path, options, job) | |
- use_devtools_video = True if self.job['capture_display'] is None else False | |
- DevtoolsBrowser.__init__(self, options, job, use_devtools_video=use_devtools_video) | |
- self.start_page = 'http://127.0.0.1:8888/orange.html' | |
+ use_devtools_video = True if self.job["capture_display"] is None else False | |
+ DevtoolsBrowser.__init__( | |
+ self, options, job, use_devtools_video=use_devtools_video | |
+ ) | |
+ self.start_page = "http://127.0.0.1:8888/orange.html" | |
self.connected = False | |
self.is_chrome = True | |
def launch(self, job, task): | |
"""Launch the browser""" | |
self.install_policy() | |
args = list(CHROME_COMMAND_LINE_OPTIONS) | |
host_rules = list(HOST_RULES) | |
- if 'host_rules' in task: | |
- host_rules.extend(task['host_rules']) | |
- args.append('--host-resolver-rules=' + ','.join(host_rules)) | |
- args.extend(['--window-position="0,0"', | |
- '--window-size="{0:d},{1:d}"'.format(task['width'], task['height'])]) | |
- args.append('--remote-debugging-port={0:d}'.format(task['port'])) | |
- if 'ignoreSSL' in job and job['ignoreSSL']: | |
- args.append('--ignore-certificate-errors') | |
- if 'netlog' in job and job['netlog']: | |
- netlog_file = os.path.join(task['dir'], task['prefix']) + '_netlog.txt' | |
+ if "host_rules" in task: | |
+ host_rules.extend(task["host_rules"]) | |
+ args.append("--host-resolver-rules=" + ",".join(host_rules)) | |
+ args.extend( | |
+ [ | |
+ '--window-position="0,0"', | |
+ '--window-size="{0:d},{1:d}"'.format(task["width"], task["height"]), | |
+ ] | |
+ ) | |
+ args.append("--remote-debugging-port={0:d}".format(task["port"])) | |
+ if "ignoreSSL" in job and job["ignoreSSL"]: | |
+ args.append("--ignore-certificate-errors") | |
+ if "netlog" in job and job["netlog"]: | |
+ netlog_file = os.path.join(task["dir"], task["prefix"]) + "_netlog.txt" | |
args.append('--log-net-log="{0}"'.format(netlog_file)) | |
- if 'profile' in task: | |
- args.append('--user-data-dir="{0}"'.format(task['profile'])) | |
- self.setup_prefs(task['profile']) | |
- if 'overrideHosts' in task and task['overrideHosts']: | |
- args.append('--enable-features=NetworkService') | |
+ if "profile" in task: | |
+ args.append('--user-data-dir="{0}"'.format(task["profile"])) | |
+ self.setup_prefs(task["profile"]) | |
+ if "overrideHosts" in task and task["overrideHosts"]: | |
+ args.append("--enable-features=NetworkService") | |
if self.options.xvfb: | |
- args.append('--disable-gpu') | |
+ args.append("--disable-gpu") | |
if self.options.dockerized: | |
- args.append('--no-sandbox') | |
- if platform.system() == "Linux": | |
- args.append('--disable-setuid-sandbox') | |
- if self.path.find(' ') > -1: | |
+ args.append("--no-sandbox") | |
+ if platform.system() == "Linux": | |
+ args.append("--disable-setuid-sandbox") | |
+ if self.path.find(" ") > -1: | |
command_line = '"{0}"'.format(self.path) | |
else: | |
command_line = self.path | |
- command_line += ' ' + ' '.join(args) | |
- if 'addCmdLine' in job: | |
- command_line += ' ' + job['addCmdLine'] | |
- command_line += ' ' + 'about:blank' | |
+ command_line += " " + " ".join(args) | |
+ if "addCmdLine" in job: | |
+ command_line += " " + job["addCmdLine"] | |
+ command_line += " " + "about:blank" | |
# re-try launching and connecting a few times if necessary | |
connected = False | |
count = 0 | |
while not connected and count < 3: | |
count += 1 | |
DesktopBrowser.launch_browser(self, command_line) | |
if DevtoolsBrowser.connect(self, task): | |
connected = True | |
elif count < 3: | |
DesktopBrowser.stop(self, job, task) | |
- if 'error' in task and task['error'] is not None: | |
- task['error'] = None | |
+ if "error" in task and task["error"] is not None: | |
+ task["error"] = None | |
# try launching the browser with no command-line options to | |
# do any one-time startup initialization | |
if count == 1: | |
- bare_options = ['--disable-gpu'] | |
+ bare_options = ["--disable-gpu"] | |
if self.options.dockerized: | |
- bare_options.append('--no-sandbox') | |
+ bare_options.append("--no-sandbox") | |
if platform.system() == "Linux": | |
- bare_options.append('--disable-setuid-sandbox') | |
- logging.debug('Launching browser with no options for configuration') | |
- relaunch = '"{0}"'.format(self.path) + ' ' + ' '.join(bare_options) | |
+ bare_options.append("--disable-setuid-sandbox") | |
+ logging.debug("Launching browser with no options for configuration") | |
+ relaunch = '"{0}"'.format(self.path) + " " + " ".join(bare_options) | |
DesktopBrowser.launch_browser(self, relaunch) | |
time.sleep(30) | |
DesktopBrowser.stop(self, job, task) | |
time.sleep(10) | |
if connected: | |
@@ -134,51 +142,65 @@ | |
if self.connected: | |
DevtoolsBrowser.disconnect(self) | |
DesktopBrowser.stop(self, job, task) | |
# Make SURE the chrome processes are gone | |
if platform.system() == "Linux": | |
- subprocess.call(['killall', '-9', 'chrome']) | |
- netlog_file = os.path.join(task['dir'], task['prefix']) + '_netlog.txt' | |
+ subprocess.call(["killall", "-9", "chrome"]) | |
+ netlog_file = os.path.join(task["dir"], task["prefix"]) + "_netlog.txt" | |
if os.path.isfile(netlog_file): | |
- netlog_gzip = netlog_file + '.gz' | |
- with open(netlog_file, 'rb') as f_in: | |
- with gzip.open(netlog_gzip, 'wb', 7) as f_out: | |
+ netlog_gzip = netlog_file + ".gz" | |
+ with open(netlog_file, "rb") as f_in: | |
+ with gzip.open(netlog_gzip, "wb", 7) as f_out: | |
shutil.copyfileobj(f_in, f_out) | |
if os.path.isfile(netlog_gzip): | |
os.remove(netlog_file) | |
self.remove_policy() | |
def setup_prefs(self, profile_dir): | |
"""Install our base set of preferences""" | |
- src = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'chrome', 'prefs.json') | |
- dest_dir = os.path.join(profile_dir, 'Default') | |
+ src = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "chrome", | |
+ "prefs.json", | |
+ ) | |
+ dest_dir = os.path.join(profile_dir, "Default") | |
try: | |
os.makedirs(dest_dir) | |
- shutil.copy(src, os.path.join(dest_dir, 'Preferences')) | |
+ shutil.copy(src, os.path.join(dest_dir, "Preferences")) | |
except Exception: | |
pass | |
def install_policy(self): | |
"""Install the required policy list (Linux only right now)""" | |
if platform.system() == "Linux": | |
- subprocess.call(['sudo', 'mkdir', '-p', '/etc/opt/chrome/policies/managed']) | |
- subprocess.call(['sudo', 'chmod', '-w', '/etc/opt/chrome/policies/managed']) | |
- subprocess.call(['sudo', 'mkdir', '-p', '/etc/chromium/policies/managed']) | |
- subprocess.call(['sudo', 'chmod', '-w', '/etc/chromium/policies/managed']) | |
- src = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'chrome', 'wpt_policy.json') | |
- subprocess.call(['sudo', 'cp', src, | |
- '/etc/opt/chrome/policies/managed/wpt_policy.json']) | |
- subprocess.call(['sudo', 'cp', src, | |
- '/etc/chromium/policies/managed/wpt_policy.json']) | |
+ subprocess.call(["sudo", "mkdir", "-p", "/etc/opt/chrome/policies/managed"]) | |
+ subprocess.call(["sudo", "chmod", "-w", "/etc/opt/chrome/policies/managed"]) | |
+ subprocess.call(["sudo", "mkdir", "-p", "/etc/chromium/policies/managed"]) | |
+ subprocess.call(["sudo", "chmod", "-w", "/etc/chromium/policies/managed"]) | |
+ src = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "chrome", | |
+ "wpt_policy.json", | |
+ ) | |
+ subprocess.call( | |
+ ["sudo", "cp", src, "/etc/opt/chrome/policies/managed/wpt_policy.json"] | |
+ ) | |
+ subprocess.call( | |
+ ["sudo", "cp", src, "/etc/chromium/policies/managed/wpt_policy.json"] | |
+ ) | |
def remove_policy(self): | |
"""Remove the installed policy""" | |
if platform.system() == "Linux": | |
- subprocess.call(['sudo', 'rm', '/etc/opt/chrome/policies/managed/wpt_policy.json']) | |
- subprocess.call(['sudo', 'rm', '/etc/chromium/policies/managed/wpt_policy.json']) | |
+ subprocess.call( | |
+ ["sudo", "rm", "/etc/opt/chrome/policies/managed/wpt_policy.json"] | |
+ ) | |
+ subprocess.call( | |
+ ["sudo", "rm", "/etc/chromium/policies/managed/wpt_policy.json"] | |
+ ) | |
def on_start_recording(self, task): | |
"""Notification that we are about to start an operation that needs to be recorded""" | |
DesktopBrowser.on_start_recording(self, task) | |
DevtoolsBrowser.on_start_recording(self, task) | |
--- internal/blackbox_android.py 2018-10-25 17:07:28.858092 +0000 | |
+++ internal/blackbox_android.py 2019-02-06 17:08:26.202594 +0000 | |
@@ -8,132 +8,152 @@ | |
import time | |
import monotonic | |
from .android_browser import AndroidBrowser | |
CHROME_COMMAND_LINE_OPTIONS = [ | |
- '--disable-fre', | |
- '--enable-benchmarking', | |
- '--metrics-recording-only', | |
- '--disable-geolocation', | |
- '--disable-background-networking', | |
- '--no-default-browser-check', | |
- '--no-first-run', | |
- '--process-per-tab', | |
- '--disable-infobars', | |
- '--disable-translate', | |
- '--allow-running-insecure-content', | |
- '--disable-save-password-bubble', | |
- '--disable-background-downloads', | |
- '--disable-add-to-shelf', | |
- '--disable-client-side-phishing-detection', | |
- '--disable-datasaver-prompt', | |
- '--disable-default-apps', | |
- '--disable-domain-reliability', | |
- '--disable-background-timer-throttling', | |
- '--safebrowsing-disable-auto-update', | |
- '--disable-sync', | |
- '--disable-external-intent-requests' | |
+ "--disable-fre", | |
+ "--enable-benchmarking", | |
+ "--metrics-recording-only", | |
+ "--disable-geolocation", | |
+ "--disable-background-networking", | |
+ "--no-default-browser-check", | |
+ "--no-first-run", | |
+ "--process-per-tab", | |
+ "--disable-infobars", | |
+ "--disable-translate", | |
+ "--allow-running-insecure-content", | |
+ "--disable-save-password-bubble", | |
+ "--disable-background-downloads", | |
+ "--disable-add-to-shelf", | |
+ "--disable-client-side-phishing-detection", | |
+ "--disable-datasaver-prompt", | |
+ "--disable-default-apps", | |
+ "--disable-domain-reliability", | |
+ "--disable-background-timer-throttling", | |
+ "--safebrowsing-disable-auto-update", | |
+ "--disable-sync", | |
+ "--disable-external-intent-requests", | |
] | |
HOST_RULES = [ | |
'"MAP cache.pack.google.com 127.0.0.1"', | |
- '"MAP clients1.google.com 127.0.0.1"' | |
+ '"MAP clients1.google.com 127.0.0.1"', | |
] | |
-START_PAGE = 'http://www.webpagetest.org/blank.html' | |
+START_PAGE = "http://www.webpagetest.org/blank.html" | |
class BlackBoxAndroid(AndroidBrowser): | |
"""Chrome browser on Android""" | |
+ | |
def __init__(self, adb, config, options, job): | |
self.adb = adb | |
self.task = None | |
self.options = options | |
self.config = dict(config) | |
# pull in the APK info for the browser | |
- if 'apk_info' in job and 'packages' in job['apk_info'] and \ | |
- self.config['package'] in job['apk_info']['packages']: | |
- apk_info = job['apk_info']['packages'][self.config['package']] | |
- self.config['apk_url'] = apk_info['apk_url'] | |
- self.config['md5'] = apk_info['md5'].lower() | |
+ if ( | |
+ "apk_info" in job | |
+ and "packages" in job["apk_info"] | |
+ and self.config["package"] in job["apk_info"]["packages"] | |
+ ): | |
+ apk_info = job["apk_info"]["packages"][self.config["package"]] | |
+ self.config["apk_url"] = apk_info["apk_url"] | |
+ self.config["md5"] = apk_info["md5"].lower() | |
AndroidBrowser.__init__(self, adb, options, job, self.config) | |
def prepare(self, job, task): | |
"""Prepare the profile/OS for the browser""" | |
self.task = task | |
AndroidBrowser.prepare(self, job, task) | |
- if not task['cached']: | |
+ if not task["cached"]: | |
self.clear_profile(task) | |
- if 'settings' in self.config and self.config['settings'] == "Opera Mini": | |
+ if "settings" in self.config and self.config["settings"] == "Opera Mini": | |
self.prepare_opera_mini_settings() | |
def launch(self, job, task): | |
"""Launch the browser""" | |
# copy the Chrome command-line just in case it is needed | |
args = list(CHROME_COMMAND_LINE_OPTIONS) | |
host_rules = list(HOST_RULES) | |
- if 'host_rules' in task: | |
- host_rules.extend(task['host_rules']) | |
- args.append('--host-resolver-rules=' + ','.join(host_rules)) | |
- if 'ignoreSSL' in job and job['ignoreSSL']: | |
- args.append('--ignore-certificate-errors') | |
- command_line = 'chrome ' + ' '.join(args) | |
- if 'addCmdLine' in job: | |
- command_line += ' ' + job['addCmdLine'] | |
- local_command_line = os.path.join(task['dir'], 'chrome-command-line') | |
- remote_command_line = '/data/local/tmp/chrome-command-line' | |
- root_command_line = '/data/local/chrome-command-line' | |
+ if "host_rules" in task: | |
+ host_rules.extend(task["host_rules"]) | |
+ args.append("--host-resolver-rules=" + ",".join(host_rules)) | |
+ if "ignoreSSL" in job and job["ignoreSSL"]: | |
+ args.append("--ignore-certificate-errors") | |
+ command_line = "chrome " + " ".join(args) | |
+ if "addCmdLine" in job: | |
+ command_line += " " + job["addCmdLine"] | |
+ local_command_line = os.path.join(task["dir"], "chrome-command-line") | |
+ remote_command_line = "/data/local/tmp/chrome-command-line" | |
+ root_command_line = "/data/local/chrome-command-line" | |
logging.debug(command_line) | |
- with open(local_command_line, 'wb') as f_out: | |
+ with open(local_command_line, "wb") as f_out: | |
f_out.write(command_line) | |
- if self.adb.adb(['push', local_command_line, remote_command_line]): | |
+ if self.adb.adb(["push", local_command_line, remote_command_line]): | |
os.remove(local_command_line) | |
# try copying it to /data/local for rooted devices that need it there | |
- if self.adb.su('cp {0} {1}'.format(remote_command_line, root_command_line)) is not None: | |
- self.adb.su('chmod 666 {0}'.format(root_command_line)) | |
+ if ( | |
+ self.adb.su("cp {0} {1}".format(remote_command_line, root_command_line)) | |
+ is not None | |
+ ): | |
+ self.adb.su("chmod 666 {0}".format(root_command_line)) | |
# launch the browser | |
- activity = '{0}/{1}'.format(self.config['package'], self.config['activity']) | |
+ activity = "{0}/{1}".format(self.config["package"], self.config["activity"]) | |
start_page = START_PAGE | |
- if 'startPage' in self.config: | |
- start_page = self.config['startPage'] | |
- self.adb.shell(['am', 'start', '-n', activity, '-a', | |
- 'android.intent.action.VIEW', '-d', start_page]) | |
- if 'startupDelay' in self.config: | |
- time.sleep(self.config['startupDelay']) | |
+ if "startPage" in self.config: | |
+ start_page = self.config["startPage"] | |
+ self.adb.shell( | |
+ [ | |
+ "am", | |
+ "start", | |
+ "-n", | |
+ activity, | |
+ "-a", | |
+ "android.intent.action.VIEW", | |
+ "-d", | |
+ start_page, | |
+ ] | |
+ ) | |
+ if "startupDelay" in self.config: | |
+ time.sleep(self.config["startupDelay"]) | |
self.wait_for_network_idle() | |
def run_task(self, task): | |
"""Skip anything that isn't a navigate command""" | |
logging.debug("Running test") | |
- end_time = monotonic.monotonic() + task['test_time_limit'] | |
- task['log_data'] = True | |
- task['current_step'] = 1 | |
- task['prefix'] = task['task_prefix'] | |
- task['video_subdirectory'] = task['task_video_prefix'] | |
- if self.job['video']: | |
- task['video_directories'].append(task['video_subdirectory']) | |
- task['step_name'] = 'Navigate' | |
- task['run_start_time'] = monotonic.monotonic() | |
+ end_time = monotonic.monotonic() + task["test_time_limit"] | |
+ task["log_data"] = True | |
+ task["current_step"] = 1 | |
+ task["prefix"] = task["task_prefix"] | |
+ task["video_subdirectory"] = task["task_video_prefix"] | |
+ if self.job["video"]: | |
+ task["video_directories"].append(task["video_subdirectory"]) | |
+ task["step_name"] = "Navigate" | |
+ task["run_start_time"] = monotonic.monotonic() | |
self.on_start_recording(task) | |
- while len(task['script']) and monotonic.monotonic() < end_time: | |
- command = task['script'].pop(0) | |
- if command['command'] == 'navigate': | |
- task['page_data']['URL'] = command['target'] | |
- activity = '{0}/{1}'.format(self.config['package'], self.config['activity']) | |
+ while len(task["script"]) and monotonic.monotonic() < end_time: | |
+ command = task["script"].pop(0) | |
+ if command["command"] == "navigate": | |
+ task["page_data"]["URL"] = command["target"] | |
+ activity = "{0}/{1}".format( | |
+ self.config["package"], self.config["activity"] | |
+ ) | |
cmd = 'am start -n {0} -a android.intent.action.VIEW \ | |
- -d "{1}"'.format(activity, | |
- command['target'].replace('"', '%22')) | |
- local_intent = os.path.join(task['dir'], 'wpt_intent.sh') | |
- remote_intent = '/data/local/tmp/wpt_intent.sh' | |
- self.adb.shell(['rm', remote_intent]) | |
- with open(local_intent, 'wb') as f_out: | |
+ -d "{1}"'.format( | |
+ activity, command["target"].replace('"', "%22") | |
+ ) | |
+ local_intent = os.path.join(task["dir"], "wpt_intent.sh") | |
+ remote_intent = "/data/local/tmp/wpt_intent.sh" | |
+ self.adb.shell(["rm", remote_intent]) | |
+ with open(local_intent, "wb") as f_out: | |
f_out.write(cmd) | |
- if self.adb.adb(['push', local_intent, remote_intent]): | |
+ if self.adb.adb(["push", local_intent, remote_intent]): | |
os.remove(local_intent) | |
- self.adb.shell(['chmod', '777', remote_intent]) | |
+ self.adb.shell(["chmod", "777", remote_intent]) | |
self.adb.shell([remote_intent]) | |
- self.adb.shell(['rm', remote_intent]) | |
+ self.adb.shell(["rm", remote_intent]) | |
self.wait_for_page_load() | |
self.on_stop_capture(task) | |
self.on_stop_recording(task) | |
self.on_start_processing(task) | |
self.wait_for_processing(task) | |
@@ -144,13 +164,13 @@ | |
pass | |
def stop(self, job, task): | |
"""Stop testing""" | |
# kill the browser | |
- self.adb.shell(['am', 'force-stop', self.config['package']]) | |
- self.adb.shell(['rm', '/data/local/tmp/chrome-command-line']) | |
- self.adb.su('rm /data/local/chrome-command-line') | |
+ self.adb.shell(["am", "force-stop", self.config["package"]]) | |
+ self.adb.shell(["rm", "/data/local/tmp/chrome-command-line"]) | |
+ self.adb.su("rm /data/local/chrome-command-line") | |
def on_stop_capture(self, task): | |
"""Do any quick work to stop things that are capturing data""" | |
AndroidBrowser.on_stop_capture(self, task) | |
@@ -163,56 +183,65 @@ | |
"""Start any processing of the captured data""" | |
AndroidBrowser.on_start_processing(self, task) | |
def clear_profile(self, _): | |
"""Clear the browser profile""" | |
- if 'clearProfile' in self.config and self.config['clearProfile']: | |
- self.adb.shell(['pm', 'clear', self.config['package']]) | |
- elif 'directories' in self.config: | |
- remove = ' ' | |
- for directory in self.config['directories']: | |
- remove += ' "/data/data/{0}/{1}"'.format(self.config['package'], directory) | |
+ if "clearProfile" in self.config and self.config["clearProfile"]: | |
+ self.adb.shell(["pm", "clear", self.config["package"]]) | |
+ elif "directories" in self.config: | |
+ remove = " " | |
+ for directory in self.config["directories"]: | |
+ remove += ' "/data/data/{0}/{1}"'.format( | |
+ self.config["package"], directory | |
+ ) | |
if len(remove): | |
- self.adb.su('rm -r' + remove) | |
+ self.adb.su("rm -r" + remove) | |
def ensure_xml_setting(self, settings, key, value): | |
"""Make sure the provided setting exists in the setting string""" | |
if settings.find('name="{0}" value="{1}"'.format(key, value)) == -1: | |
modified = True | |
- settings = re.sub(r'name=\"{0}\" value=\"[^\"]\"'.format(key), | |
- 'name="{0}" value="{1}"'.format(key, value), settings) | |
+ settings = re.sub( | |
+ r"name=\"{0}\" value=\"[^\"]\"".format(key), | |
+ 'name="{0}" value="{1}"'.format(key, value), | |
+ settings, | |
+ ) | |
if settings.find('name="{0}" value="{1}"'.format(key, value)) == -1: | |
- settings = settings.replace('\n</map>', | |
- '\n <int name="{0}" value="{1}" />\n</map>'.format(key, value)) | |
+ settings = settings.replace( | |
+ "\n</map>", | |
+ '\n <int name="{0}" value="{1}" />\n</map>'.format(key, value), | |
+ ) | |
return settings | |
def prepare_opera_mini_settings(self): | |
"""Configure the data saver settings""" | |
compression = "1" | |
if "mode" in self.config: | |
if self.config["mode"].find("high") >= 0: | |
compression = "0" | |
- settings_file = "/data/data/{0}/shared_prefs/user_settings.xml".format(self.config['package']) | |
- settings = self.adb.su('cat ' + settings_file).replace("\r", "") | |
+ settings_file = "/data/data/{0}/shared_prefs/user_settings.xml".format( | |
+ self.config["package"] | |
+ ) | |
+ settings = self.adb.su("cat " + settings_file).replace("\r", "") | |
original_settings = str(settings) | |
# make sure ad blocking and compression are at least enabled | |
settings = self.ensure_xml_setting(settings, "obml_ad_blocking", "1") | |
settings = self.ensure_xml_setting(settings, "compression_enabled", "1") | |
settings = self.ensure_xml_setting(settings, "compression", compression) | |
if settings != original_settings: | |
- local_settings = os.path.join(self.task['dir'], 'user_settings.xml') | |
- remote_temp = '/data/local/tmp/user_settings.xml' | |
- with open(local_settings, 'wb') as f_out: | |
+ local_settings = os.path.join(self.task["dir"], "user_settings.xml") | |
+ remote_temp = "/data/local/tmp/user_settings.xml" | |
+ with open(local_settings, "wb") as f_out: | |
f_out.write(settings) | |
- if self.adb.adb(['push', local_settings, remote_temp]): | |
- self.adb.su('chmod 666 /data/local/tmp/user_settings.xml') | |
- self.adb.su('cp /data/local/tmp/user_settings.xml ' + settings_file) | |
+ if self.adb.adb(["push", local_settings, remote_temp]): | |
+ self.adb.su("chmod 666 /data/local/tmp/user_settings.xml") | |
+ self.adb.su("cp /data/local/tmp/user_settings.xml " + settings_file) | |
os.remove(local_settings) | |
def wait_for_network_idle(self): | |
"""Wait for 5 one-second intervals that receive less than 1KB""" | |
- logging.debug('Waiting for network idle') | |
+ logging.debug("Waiting for network idle") | |
end_time = monotonic.monotonic() + 60 | |
self.adb.get_bytes_rx() | |
idle_count = 0 | |
while idle_count < 5 and monotonic.monotonic() < end_time: | |
time.sleep(1) | |
@@ -223,35 +252,39 @@ | |
else: | |
idle_count += 1 | |
def wait_for_page_load(self): | |
"""Once the video starts growing, wait for it to stop""" | |
- logging.debug('Waiting for the page to load') | |
+ logging.debug("Waiting for the page to load") | |
# Wait for the video to start (up to 30 seconds) | |
end_startup = monotonic.monotonic() + 30 | |
- end_time = monotonic.monotonic() + self.task['time_limit'] | |
+ end_time = monotonic.monotonic() + self.task["time_limit"] | |
last_size = self.adb.get_video_size() | |
video_started = False | |
bytes_rx = self.adb.get_bytes_rx() | |
while not video_started and monotonic.monotonic() < end_startup: | |
time.sleep(5) | |
video_size = self.adb.get_video_size() | |
bytes_rx = self.adb.get_bytes_rx() | |
delta = video_size - last_size | |
- logging.debug('Video Size: %d bytes (+ %d)', video_size, delta) | |
+ logging.debug("Video Size: %d bytes (+ %d)", video_size, delta) | |
last_size = video_size | |
if delta > 50000: | |
video_started = True | |
# Wait for the activity to stop | |
video_idle_count = 0 | |
while video_idle_count <= 3 and monotonic.monotonic() < end_time: | |
time.sleep(5) | |
video_size = self.adb.get_video_size() | |
bytes_rx = self.adb.get_bytes_rx() | |
delta = video_size - last_size | |
- logging.debug('Video Size: %d bytes (+ %d) - %d bytes received', | |
- video_size, delta, bytes_rx) | |
+ logging.debug( | |
+ "Video Size: %d bytes (+ %d) - %d bytes received", | |
+ video_size, | |
+ delta, | |
+ bytes_rx, | |
+ ) | |
last_size = video_size | |
if delta > 10000 or bytes_rx > 5000: | |
video_idle_count = 0 | |
else: | |
video_idle_count += 1 | |
--- internal/chrome_android.py 2018-10-25 17:07:35.741167 +0000 | |
+++ internal/chrome_android.py 2019-02-06 17:08:26.275310 +0000 | |
@@ -11,40 +11,40 @@ | |
import monotonic | |
from .devtools_browser import DevtoolsBrowser | |
from .android_browser import AndroidBrowser | |
CHROME_COMMAND_LINE_OPTIONS = [ | |
- '--disable-fre', | |
- '--enable-benchmarking', | |
- '--metrics-recording-only', | |
- '--disable-geolocation', | |
- '--disable-background-networking', | |
- '--no-default-browser-check', | |
- '--no-first-run', | |
- '--process-per-tab', | |
- '--disable-infobars', | |
- '--disable-translate', | |
- '--allow-running-insecure-content', | |
- '--disable-save-password-bubble', | |
- '--disable-background-downloads', | |
- '--disable-add-to-shelf', | |
- '--disable-client-side-phishing-detection', | |
- '--disable-datasaver-prompt', | |
- '--disable-device-discovery-notifications', | |
- '--disable-default-apps', | |
- '--disable-domain-reliability', | |
- '--disable-background-timer-throttling', | |
- '--safebrowsing-disable-auto-update', | |
- '--disable-external-intent-requests', | |
- '--enable-remote-debugging', | |
- '--disable-browser-side-navigation', | |
- '--net-log-capture-mode=IncludeCookiesAndCredentials' | |
+ "--disable-fre", | |
+ "--enable-benchmarking", | |
+ "--metrics-recording-only", | |
+ "--disable-geolocation", | |
+ "--disable-background-networking", | |
+ "--no-default-browser-check", | |
+ "--no-first-run", | |
+ "--process-per-tab", | |
+ "--disable-infobars", | |
+ "--disable-translate", | |
+ "--allow-running-insecure-content", | |
+ "--disable-save-password-bubble", | |
+ "--disable-background-downloads", | |
+ "--disable-add-to-shelf", | |
+ "--disable-client-side-phishing-detection", | |
+ "--disable-datasaver-prompt", | |
+ "--disable-device-discovery-notifications", | |
+ "--disable-default-apps", | |
+ "--disable-domain-reliability", | |
+ "--disable-background-timer-throttling", | |
+ "--safebrowsing-disable-auto-update", | |
+ "--disable-external-intent-requests", | |
+ "--enable-remote-debugging", | |
+ "--disable-browser-side-navigation", | |
+ "--net-log-capture-mode=IncludeCookiesAndCredentials", | |
] | |
HOST_RULES = [ | |
'"MAP cache.pack.google.com 127.0.0.1"', | |
- '"MAP clients1.google.com 127.0.0.1"' | |
+ '"MAP clients1.google.com 127.0.0.1"', | |
] | |
""" Orange page | |
<html> | |
<head> | |
@@ -54,101 +54,128 @@ | |
</style> | |
</head> | |
<body><div id='wptorange'></div></body> | |
</html> | |
""" | |
-START_PAGE = 'data:text/html,%3Chtml%3E%0D%0A%3Chead%3E%0D%0A%3Cstyle%3E%0D%0Abody%20%7B'\ | |
- 'background-color%3A%20white%3B%20margin%3A%200%3B%7D%0D%0A%23wptorange%20%7B'\ | |
- 'width%3A100%25%3B%20height%3A%20100%25%3B%20background-color'\ | |
- '%3A%20%23DE640D%3B%7D%0D%0A%3C%2Fstyle%3E%0D%0A%3C%2Fhead%3E%0D%0A%3Cbody%3E%3C'\ | |
- 'div%20id%3D%27wptorange%27%3E%3C%2Fdiv%3E%3C%2Fbody%3E%0D%0A%3C%2Fhtml%3E' | |
+START_PAGE = ( | |
+ "data:text/html,%3Chtml%3E%0D%0A%3Chead%3E%0D%0A%3Cstyle%3E%0D%0Abody%20%7B" | |
+ "background-color%3A%20white%3B%20margin%3A%200%3B%7D%0D%0A%23wptorange%20%7B" | |
+ "width%3A100%25%3B%20height%3A%20100%25%3B%20background-color" | |
+ "%3A%20%23DE640D%3B%7D%0D%0A%3C%2Fstyle%3E%0D%0A%3C%2Fhead%3E%0D%0A%3Cbody%3E%3C" | |
+ "div%20id%3D%27wptorange%27%3E%3C%2Fdiv%3E%3C%2Fbody%3E%0D%0A%3C%2Fhtml%3E" | |
+) | |
+ | |
class ChromeAndroid(AndroidBrowser, DevtoolsBrowser): | |
"""Chrome browser on Android""" | |
+ | |
def __init__(self, adb, config, options, job): | |
self.adb = adb | |
self.options = options | |
self.config = dict(config) | |
# default (overridable) configs | |
- self.config['command_line_file'] = 'chrome-command-line' | |
+ self.config["command_line_file"] = "chrome-command-line" | |
# pull in the APK info for the browser | |
- if 'apk_info' in job and 'packages' in job['apk_info'] and \ | |
- self.config['package'] in job['apk_info']['packages']: | |
- apk_info = job['apk_info']['packages'][self.config['package']] | |
- self.config['apk_url'] = apk_info['apk_url'] | |
- self.config['md5'] = apk_info['md5'].lower() | |
+ if ( | |
+ "apk_info" in job | |
+ and "packages" in job["apk_info"] | |
+ and self.config["package"] in job["apk_info"]["packages"] | |
+ ): | |
+ apk_info = job["apk_info"]["packages"][self.config["package"]] | |
+ self.config["apk_url"] = apk_info["apk_url"] | |
+ self.config["md5"] = apk_info["md5"].lower() | |
# pull in the settings for a custom browser into the config | |
- if 'customBrowser_package' in job: | |
- self.config['package'] = job['customBrowser_package'] | |
- if 'customBrowser_activity' in job: | |
- self.config['activity'] = job['customBrowser_activity'] | |
- if 'customBrowserUrl' in job: | |
- self.config['apk_url'] = job['customBrowserUrl'] | |
- if 'customBrowserMD5' in job: | |
- self.config['md5'] = job['customBrowserMD5'].lower() | |
- if 'customBrowser_flagsFile' in job: | |
- self.config['command_line_file'] = os.path.basename(job['customBrowser_flagsFile']) | |
+ if "customBrowser_package" in job: | |
+ self.config["package"] = job["customBrowser_package"] | |
+ if "customBrowser_activity" in job: | |
+ self.config["activity"] = job["customBrowser_activity"] | |
+ if "customBrowserUrl" in job: | |
+ self.config["apk_url"] = job["customBrowserUrl"] | |
+ if "customBrowserMD5" in job: | |
+ self.config["md5"] = job["customBrowserMD5"].lower() | |
+ if "customBrowser_flagsFile" in job: | |
+ self.config["command_line_file"] = os.path.basename( | |
+ job["customBrowser_flagsFile"] | |
+ ) | |
AndroidBrowser.__init__(self, adb, options, job, self.config) | |
DevtoolsBrowser.__init__(self, options, job, use_devtools_video=False) | |
self.devtools_screenshot = False | |
self.connected = False | |
def prepare(self, job, task): | |
"""Prepare the profile/OS for the browser""" | |
self.task = task | |
AndroidBrowser.prepare(self, job, task) | |
try: | |
- self.adb.adb(['forward', '--remove', 'tcp:{0}'.format(task['port'])]) | |
+ self.adb.adb(["forward", "--remove", "tcp:{0}".format(task["port"])]) | |
# clear the profile if necessary | |
- if task['cached']: | |
- self.adb.su('rm -r /data/data/' + self.config['package'] + '/app_tabs') | |
+ if task["cached"]: | |
+ self.adb.su("rm -r /data/data/" + self.config["package"] + "/app_tabs") | |
else: | |
self.clear_profile(task) | |
except Exception as err: | |
logging.exception("Exception preparing Browser: %s", err.__str__()) | |
def launch(self, job, task): | |
"""Launch the browser""" | |
args = list(CHROME_COMMAND_LINE_OPTIONS) | |
host_rules = list(HOST_RULES) | |
- if 'host_rules' in task: | |
- host_rules.extend(task['host_rules']) | |
- args.append('--host-resolver-rules=' + ','.join(host_rules)) | |
- if 'ignoreSSL' in job and job['ignoreSSL']: | |
- args.append('--ignore-certificate-errors') | |
- if 'netlog' in job and job['netlog']: | |
- self.adb.shell(['rm', '/data/local/tmp/netlog.txt']) | |
- args.append('--log-net-log=/data/local/tmp/netlog.txt') | |
- if 'overrideHosts' in task and task['overrideHosts']: | |
- args.append('--enable-features=NetworkService') | |
- command_line = 'chrome ' + ' '.join(args) | |
- if 'addCmdLine' in job: | |
- command_line += ' ' + job['addCmdLine'] | |
- command_line += ' about:blank' | |
- local_command_line = os.path.join(task['dir'], self.config['command_line_file']) | |
- remote_command_line = '/data/local/tmp/' + self.config['command_line_file'] | |
- root_command_line = '/data/local/' + self.config['command_line_file'] | |
+ if "host_rules" in task: | |
+ host_rules.extend(task["host_rules"]) | |
+ args.append("--host-resolver-rules=" + ",".join(host_rules)) | |
+ if "ignoreSSL" in job and job["ignoreSSL"]: | |
+ args.append("--ignore-certificate-errors") | |
+ if "netlog" in job and job["netlog"]: | |
+ self.adb.shell(["rm", "/data/local/tmp/netlog.txt"]) | |
+ args.append("--log-net-log=/data/local/tmp/netlog.txt") | |
+ if "overrideHosts" in task and task["overrideHosts"]: | |
+ args.append("--enable-features=NetworkService") | |
+ command_line = "chrome " + " ".join(args) | |
+ if "addCmdLine" in job: | |
+ command_line += " " + job["addCmdLine"] | |
+ command_line += " about:blank" | |
+ local_command_line = os.path.join(task["dir"], self.config["command_line_file"]) | |
+ remote_command_line = "/data/local/tmp/" + self.config["command_line_file"] | |
+ root_command_line = "/data/local/" + self.config["command_line_file"] | |
logging.debug(command_line) | |
- with open(local_command_line, 'wb') as f_out: | |
+ with open(local_command_line, "wb") as f_out: | |
f_out.write(command_line) | |
- if self.adb.adb(['push', local_command_line, remote_command_line]): | |
+ if self.adb.adb(["push", local_command_line, remote_command_line]): | |
os.remove(local_command_line) | |
# try copying it to /data/local for rooted devices that need it there | |
- if self.adb.su('cp {0} {1}'.format(remote_command_line, root_command_line)) is not None: | |
- self.adb.su('chmod 666 {0}'.format(root_command_line)) | |
+ if ( | |
+ self.adb.su("cp {0} {1}".format(remote_command_line, root_command_line)) | |
+ is not None | |
+ ): | |
+ self.adb.su("chmod 666 {0}".format(root_command_line)) | |
# configure any browser-specific prefs | |
self.setup_prefs() | |
self.configure_prefs() | |
# launch the browser | |
- activity = '{0}/{1}'.format(self.config['package'], self.config['activity']) | |
- self.adb.shell(['am', 'start', '-n', activity, '-a', | |
- 'android.intent.action.VIEW', '-d', START_PAGE]) | |
+ activity = "{0}/{1}".format(self.config["package"], self.config["activity"]) | |
+ self.adb.shell( | |
+ [ | |
+ "am", | |
+ "start", | |
+ "-n", | |
+ activity, | |
+ "-a", | |
+ "android.intent.action.VIEW", | |
+ "-d", | |
+ START_PAGE, | |
+ ] | |
+ ) | |
# port-forward the devtools interface | |
socket_name = self.get_devtools_socket() | |
if socket_name is not None: | |
- if self.adb.adb(['forward', 'tcp:{0}'.format(task['port']), | |
- 'localabstract:{}'.format(socket_name)]): | |
+ if self.adb.adb( | |
+ [ | |
+ "forward", | |
+ "tcp:{0}".format(task["port"]), | |
+ "localabstract:{}".format(socket_name), | |
+ ] | |
+ ): | |
if DevtoolsBrowser.connect(self, task): | |
self.connected = True | |
DevtoolsBrowser.prepare_browser(self, task) | |
DevtoolsBrowser.navigate(self, START_PAGE) | |
time.sleep(0.5) | |
@@ -174,81 +201,85 @@ | |
self.adb.su('chmod 777 {0}'.format(dest)) | |
""" | |
def configure_prefs(self): | |
"""Configure browser-specific shared_prefs""" | |
- if self.config['package'] == 'com.sec.android.app.sbrowser': | |
+ if self.config["package"] == "com.sec.android.app.sbrowser": | |
prefs = { | |
- 'enable_quick_menu': '<boolean name="enable_quick_menu" value="false" />' | |
+ "enable_quick_menu": '<boolean name="enable_quick_menu" value="false" />' | |
} | |
- self.write_prefs(prefs, 'com.sec.android.app.sbrowser_preferences.xml') | |
- elif self.config['package'] == 'com.sec.android.app.sbrowser.beta': | |
+ self.write_prefs(prefs, "com.sec.android.app.sbrowser_preferences.xml") | |
+ elif self.config["package"] == "com.sec.android.app.sbrowser.beta": | |
prefs = { | |
- 'enable_quick_menu': '<boolean name="enable_quick_menu" value="false" />' | |
+ "enable_quick_menu": '<boolean name="enable_quick_menu" value="false" />' | |
} | |
- self.write_prefs(prefs, 'com.sec.android.app.sbrowser.beta_preferences.xml') | |
+ self.write_prefs(prefs, "com.sec.android.app.sbrowser.beta_preferences.xml") | |
def write_prefs(self, prefs, file_base): | |
"""update the prefs xml file""" | |
- prefs_file = '/data/data/{0}/shared_prefs/{1}'.format(self.config['package'], file_base) | |
+ prefs_file = "/data/data/{0}/shared_prefs/{1}".format( | |
+ self.config["package"], file_base | |
+ ) | |
current = None | |
current = self.adb.su('cat "{0}"'.format(prefs_file)) | |
modified = False | |
if current is not None: | |
- out = '' | |
+ out = "" | |
for line in current.splitlines(): | |
line = line.rstrip() | |
# See if it is a pref we need to modify | |
for name in prefs: | |
if line.find('name="{0}"'.format(name)) >= 0: | |
value = prefs[name] | |
if value is not None: | |
if line.find(value) < 0: | |
- logging.debug('Setting pref : %s', value) | |
- line = ' {0}'.format(value) | |
+ logging.debug("Setting pref : %s", value) | |
+ line = " {0}".format(value) | |
prefs.pop(name, None) | |
modified = True | |
break | |
- if line.startswith('</map>'): | |
+ if line.startswith("</map>"): | |
# Add any missing prefs | |
for name in prefs: | |
value = prefs[name] | |
if value is not None: | |
- logging.debug('Adding pref : %s', value) | |
- out += ' {0}\n'.format(value) | |
+ logging.debug("Adding pref : %s", value) | |
+ out += " {0}\n".format(value) | |
modified = True | |
- out += line + '\n' | |
+ out += line + "\n" | |
if modified: | |
- local = os.path.join(self.task['dir'], 'pref.xml') | |
- remote = '/data/local/tmp/pref.xml' | |
- with open(local, 'wb') as f_out: | |
+ local = os.path.join(self.task["dir"], "pref.xml") | |
+ remote = "/data/local/tmp/pref.xml" | |
+ with open(local, "wb") as f_out: | |
f_out.write(out) | |
if os.path.isfile(local): | |
- self.adb.shell(['rm', remote]) | |
- if self.adb.adb(['push', local, remote]): | |
- if self.adb.su('cp {0} {1}'.format(remote, prefs_file)) is not None: | |
- self.adb.su('chmod 666 {0}'.format(prefs_file)) | |
- self.adb.shell(['rm', remote]) | |
+ self.adb.shell(["rm", remote]) | |
+ if self.adb.adb(["push", local, remote]): | |
+ if self.adb.su("cp {0} {1}".format(remote, prefs_file)) is not None: | |
+ self.adb.su("chmod 666 {0}".format(prefs_file)) | |
+ self.adb.shell(["rm", remote]) | |
os.remove(local) | |
def get_devtools_socket(self): | |
"""Get the socket name of the remote devtools socket. @..._devtools_remote""" | |
socket_name = None | |
end_time = monotonic.monotonic() + 120 | |
time.sleep(1) | |
while socket_name is None and monotonic.monotonic() < end_time: | |
- out = self.adb.shell(['cat', '/proc/net/unix']) | |
+ out = self.adb.shell(["cat", "/proc/net/unix"]) | |
if out is not None: | |
for line in out.splitlines(): | |
- match = re.search(r'00010000 0001.* @([^\s]+_devtools_remote)', line) | |
+ match = re.search( | |
+ r"00010000 0001.* @([^\s]+_devtools_remote)", line | |
+ ) | |
if match: | |
socket_name = match.group(1) | |
- logging.debug('Remote devtools socket: {0}'.format(socket_name)) | |
+ logging.debug("Remote devtools socket: {0}".format(socket_name)) | |
if socket_name is None: | |
time.sleep(1) | |
if socket_name is None: | |
- logging.debug('Failed to find remote devtools socket') | |
+ logging.debug("Failed to find remote devtools socket") | |
return socket_name | |
def run_task(self, task): | |
"""Run an individual test""" | |
if self.connected: | |
@@ -256,24 +287,24 @@ | |
def stop(self, job, task): | |
"""Stop testing""" | |
if self.connected: | |
DevtoolsBrowser.disconnect(self) | |
- self.adb.adb(['forward', '--remove', 'tcp:{0}'.format(task['port'])]) | |
+ self.adb.adb(["forward", "--remove", "tcp:{0}".format(task["port"])]) | |
# kill the browser | |
- self.adb.shell(['am', 'force-stop', self.config['package']]) | |
- self.adb.shell(['rm', '/data/local/tmp/' + self.config['command_line_file']]) | |
- self.adb.su('rm /data/local/' + self.config['command_line_file']) | |
+ self.adb.shell(["am", "force-stop", self.config["package"]]) | |
+ self.adb.shell(["rm", "/data/local/tmp/" + self.config["command_line_file"]]) | |
+ self.adb.su("rm /data/local/" + self.config["command_line_file"]) | |
# grab the netlog if there was one | |
- if 'netlog' in job and job['netlog']: | |
- netlog_file = os.path.join(task['dir'], task['prefix']) + '_netlog.txt' | |
- self.adb.adb(['pull', '/data/local/tmp/netlog.txt', netlog_file]) | |
- self.adb.shell(['rm', '/data/local/tmp/netlog.txt']) | |
+ if "netlog" in job and job["netlog"]: | |
+ netlog_file = os.path.join(task["dir"], task["prefix"]) + "_netlog.txt" | |
+ self.adb.adb(["pull", "/data/local/tmp/netlog.txt", netlog_file]) | |
+ self.adb.shell(["rm", "/data/local/tmp/netlog.txt"]) | |
if os.path.isfile(netlog_file): | |
- netlog_gzip = netlog_file + '.gz' | |
- with open(netlog_file, 'rb') as f_in: | |
- with gzip.open(netlog_gzip, 'wb', 7) as f_out: | |
+ netlog_gzip = netlog_file + ".gz" | |
+ with open(netlog_file, "rb") as f_in: | |
+ with gzip.open(netlog_gzip, "wb", 7) as f_out: | |
shutil.copyfileobj(f_in, f_out) | |
if os.path.isfile(netlog_gzip): | |
os.remove(netlog_file) | |
def execute_js(self, script): | |
@@ -306,27 +337,32 @@ | |
DevtoolsBrowser.wait_for_processing(self, task) | |
AndroidBrowser.wait_for_processing(self, task) | |
def clear_profile(self, task): | |
"""Clear the browser profile""" | |
- local_command_line = os.path.join(task['dir'], self.config['command_line_file']) | |
- remote_command_line = '/data/local/tmp/' + self.config['command_line_file'] | |
- root_command_line = '/data/local/' + self.config['command_line_file'] | |
+ local_command_line = os.path.join(task["dir"], self.config["command_line_file"]) | |
+ remote_command_line = "/data/local/tmp/" + self.config["command_line_file"] | |
+ root_command_line = "/data/local/" + self.config["command_line_file"] | |
if os.path.isfile(local_command_line): | |
os.remove(local_command_line) | |
- self.adb.shell(['rm', remote_command_line]) | |
+ self.adb.shell(["rm", remote_command_line]) | |
self.adb.su('rm "{0}"'.format(root_command_line)) | |
# Fail gracefully if root access isn't available | |
if self.adb.short_version >= 7.0: | |
- out = self.adb.su('ls -1 /data/data/' + self.config['package']) | |
+ out = self.adb.su("ls -1 /data/data/" + self.config["package"]) | |
else: | |
- out = self.adb.su('ls /data/data/' + self.config['package']) | |
+ out = self.adb.su("ls /data/data/" + self.config["package"]) | |
if out is not None: | |
- remove = '' | |
+ remove = "" | |
for entry in out.splitlines(): | |
entry = entry.strip() | |
- if len(entry) and entry != '.' and entry != '..' and \ | |
- entry != 'lib' and entry != 'shared_prefs': | |
- remove += ' /data/data/' + self.config['package'] + '/' + entry | |
+ if ( | |
+ len(entry) | |
+ and entry != "." | |
+ and entry != ".." | |
+ and entry != "lib" | |
+ and entry != "shared_prefs" | |
+ ): | |
+ remove += " /data/data/" + self.config["package"] + "/" + entry | |
if len(remove): | |
- self.adb.su('rm -r' + remove) | |
- | |
+ self.adb.su("rm -r" + remove) | |
+ | |
--- internal/internet_explorer.py 2018-10-25 17:07:28.863134 +0000 | |
+++ internal/internet_explorer.py 2019-02-06 17:08:26.310191 +0000 | |
@@ -9,40 +9,44 @@ | |
from .os_util import run_elevated | |
class InternetExplorer(Edge): | |
"""Microsoft Edge""" | |
+ | |
def __init__(self, path, options, job): | |
Edge.__init__(self, path, options, job) | |
self.supports_interactive = False | |
- self.start_page = 'http://127.0.0.1:8888/orange.html' | |
+ self.start_page = "http://127.0.0.1:8888/orange.html" | |
def get_driver(self, task): | |
"""Get the webdriver instance""" | |
from selenium import webdriver | |
- path = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'IE') | |
- reg_file = os.path.join(path, 'keys.reg') | |
+ | |
+ path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support", "IE") | |
+ reg_file = os.path.join(path, "keys.reg") | |
if os.path.isfile(reg_file): | |
- run_elevated('reg', 'IMPORT "{0}"'.format(reg_file)) | |
- if platform.machine().endswith('64'): | |
- path = os.path.join(path, 'amd64', 'IEDriverServer.exe') | |
+ run_elevated("reg", 'IMPORT "{0}"'.format(reg_file)) | |
+ if platform.machine().endswith("64"): | |
+ path = os.path.join(path, "amd64", "IEDriverServer.exe") | |
else: | |
- path = os.path.join(path, 'x86', 'IEDriverServer.exe') | |
+ path = os.path.join(path, "x86", "IEDriverServer.exe") | |
capabilities = webdriver.DesiredCapabilities.INTERNETEXPLORER.copy() | |
- capabilities['ie.enableFullPageScreenshot'] = False | |
- if not task['cached']: | |
- capabilities['ie.ensureCleanSession'] = True | |
+ capabilities["ie.enableFullPageScreenshot"] = False | |
+ if not task["cached"]: | |
+ capabilities["ie.ensureCleanSession"] = True | |
driver = webdriver.Ie(executable_path=path, capabilities=capabilities) | |
return driver | |
def prepare(self, job, task): | |
Edge.prepare(self, job, task) | |
try: | |
import _winreg | |
- reg_path = 'Software\\Microsoft\\Windows\\CurrentVersion\\' \ | |
- 'Internet Settings\\5.0\\User Agent\\Post Platform' | |
+ | |
+ reg_path = ( | |
+ "Software\\Microsoft\\Windows\\CurrentVersion\\" | |
+ "Internet Settings\\5.0\\User Agent\\Post Platform" | |
+ ) | |
key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, reg_path) | |
# Delete any string modifiers currently in the registry | |
values = [] | |
try: | |
index = 0 | |
@@ -52,22 +56,21 @@ | |
index += 1 | |
except Exception: | |
pass | |
for value in values: | |
_winreg.DeleteValue(key, value) | |
- if 'AppendUA' in task and len(task['AppendUA']): | |
- _winreg.SetValueEx(key, task['AppendUA'], 0, | |
- _winreg.REG_SZ, 'IEAK') | |
+ if "AppendUA" in task and len(task["AppendUA"]): | |
+ _winreg.SetValueEx(key, task["AppendUA"], 0, _winreg.REG_SZ, "IEAK") | |
except Exception: | |
- logging.exception('Error writing registry key') | |
+ logging.exception("Error writing registry key") | |
def kill(self): | |
"""Kill any running instances""" | |
- processes = ['iexplore.exe', 'smartscreen.exe', 'dllhost.exe'] | |
+ processes = ["iexplore.exe", "smartscreen.exe", "dllhost.exe"] | |
for exe in processes: | |
try: | |
- run_elevated('taskkill', '/F /T /IM {0}'.format(exe)) | |
+ run_elevated("taskkill", "/F /T /IM {0}".format(exe)) | |
except Exception: | |
pass | |
def clear_cache(self): | |
"""Clear the browser cache""" | |
--- internal/android_browser.py 2019-01-08 01:37:06.387429 +0000 | |
+++ internal/android_browser.py 2019-02-06 17:08:26.331405 +0000 | |
@@ -12,149 +12,165 @@ | |
import monotonic | |
import ujson as json | |
from .base_browser import BaseBrowser | |
-SET_ORANGE = "(function() {" \ | |
- "var wptDiv = document.getElementById('wptorange');" \ | |
- "if (!wptDiv) {" \ | |
- "wptDiv = document.createElement('div');" \ | |
- "wptDiv.id = 'wptorange';" \ | |
- "wptDiv.style.position = 'absolute';" \ | |
- "wptDiv.style.top = '0';" \ | |
- "wptDiv.style.left = '0';" \ | |
- "wptDiv.style.width = Math.max(document.documentElement.clientWidth, document.body.clientWidth || 0, window.clientWidth || 0) + 'px';" \ | |
- "wptDiv.style.height = Math.max(document.documentElement.clientHeight, document.body.clientHeight || 0, window.innerHeight || 0) + 'px';" \ | |
- "wptDiv.style.zIndex = '2147483647';" \ | |
- "wptDiv.style.backgroundColor = '#DE640D';" \ | |
- "document.body.appendChild(wptDiv);" \ | |
- "}})();" | |
+SET_ORANGE = ( | |
+ "(function() {" | |
+ "var wptDiv = document.getElementById('wptorange');" | |
+ "if (!wptDiv) {" | |
+ "wptDiv = document.createElement('div');" | |
+ "wptDiv.id = 'wptorange';" | |
+ "wptDiv.style.position = 'absolute';" | |
+ "wptDiv.style.top = '0';" | |
+ "wptDiv.style.left = '0';" | |
+ "wptDiv.style.width = Math.max(document.documentElement.clientWidth, document.body.clientWidth || 0, window.clientWidth || 0) + 'px';" | |
+ "wptDiv.style.height = Math.max(document.documentElement.clientHeight, document.body.clientHeight || 0, window.innerHeight || 0) + 'px';" | |
+ "wptDiv.style.zIndex = '2147483647';" | |
+ "wptDiv.style.backgroundColor = '#DE640D';" | |
+ "document.body.appendChild(wptDiv);" | |
+ "}})();" | |
+) | |
class AndroidBrowser(BaseBrowser): | |
"""Android Browser base""" | |
+ | |
def __init__(self, adb, options, job, config): | |
BaseBrowser.__init__(self) | |
self.adb = adb | |
self.job = job | |
self.options = options | |
self.config = config | |
self.video_processing = None | |
self.tcpdump_processing = None | |
self.task = None | |
- self.video_enabled = bool(job['video']) | |
- self.tcpdump_enabled = bool('tcpdump' in job and job['tcpdump']) | |
+ self.video_enabled = bool(job["video"]) | |
+ self.tcpdump_enabled = bool("tcpdump" in job and job["tcpdump"]) | |
self.tcpdump_file = None | |
- if self.config['type'] == 'blackbox': | |
+ if self.config["type"] == "blackbox": | |
self.tcpdump_enabled = True | |
self.video_enabled = True | |
def prepare(self, job, task): | |
"""Prepare the browser and OS""" | |
self.task = task | |
self.adb.cleanup_device() | |
self.stop_all_browsers() | |
proxies = {"http": None, "https": None} | |
# Download and install the APK if necessary | |
- if 'apk_url' in self.config and 'md5' in self.config: | |
- if not os.path.isdir(job['persistent_dir']): | |
- os.makedirs(job['persistent_dir']) | |
- last_install_file = os.path.join(job['persistent_dir'], | |
- self.config['package'] + '.md5') | |
+ if "apk_url" in self.config and "md5" in self.config: | |
+ if not os.path.isdir(job["persistent_dir"]): | |
+ os.makedirs(job["persistent_dir"]) | |
+ last_install_file = os.path.join( | |
+ job["persistent_dir"], self.config["package"] + ".md5" | |
+ ) | |
last_md5 = None | |
if os.path.isfile(last_install_file): | |
- with open(last_install_file, 'rb') as f_in: | |
+ with open(last_install_file, "rb") as f_in: | |
last_md5 = f_in.read() | |
- if last_md5 is None or last_md5 != self.config['md5']: | |
+ if last_md5 is None or last_md5 != self.config["md5"]: | |
valid = False | |
- tmp_file = os.path.join(job['persistent_dir'], | |
- self.config['package'] + '.apk') | |
+ tmp_file = os.path.join( | |
+ job["persistent_dir"], self.config["package"] + ".apk" | |
+ ) | |
if os.path.isfile(tmp_file): | |
try: | |
os.remove(tmp_file) | |
except Exception: | |
pass | |
md5_hash = hashlib.md5() | |
try: | |
- logging.debug('Downloading browser update: %s to %s', | |
- self.config['apk_url'], tmp_file) | |
+ logging.debug( | |
+ "Downloading browser update: %s to %s", | |
+ self.config["apk_url"], | |
+ tmp_file, | |
+ ) | |
import requests | |
- request = requests.get(self.config['apk_url'], stream=True, proxies=proxies) | |
+ | |
+ request = requests.get( | |
+ self.config["apk_url"], stream=True, proxies=proxies | |
+ ) | |
if request.status_code == 200: | |
- with open(tmp_file, 'wb') as f_out: | |
+ with open(tmp_file, "wb") as f_out: | |
for chunk in request.iter_content(chunk_size=4096): | |
f_out.write(chunk) | |
md5_hash.update(chunk) | |
md5 = md5_hash.hexdigest().lower() | |
- if md5 == self.config['md5']: | |
+ if md5 == self.config["md5"]: | |
valid = True | |
except Exception: | |
pass | |
if os.path.isfile(tmp_file): | |
if valid: | |
- logging.debug('Installing browser APK') | |
- self.adb.adb(['install', '-rg', tmp_file]) | |
- with open(last_install_file, 'wb') as f_out: | |
+ logging.debug("Installing browser APK") | |
+ self.adb.adb(["install", "-rg", tmp_file]) | |
+ with open(last_install_file, "wb") as f_out: | |
f_out.write(md5) | |
else: | |
- logging.error('Error downloading browser APK') | |
+ logging.error("Error downloading browser APK") | |
try: | |
os.remove(tmp_file) | |
except Exception: | |
pass | |
# kill any running instances | |
- self.adb.shell(['am', 'force-stop', self.config['package']]) | |
+ self.adb.shell(["am", "force-stop", self.config["package"]]) | |
def stop_all_browsers(self): | |
"""Kill all instances of known browsers""" | |
- out = self.adb.shell(['ps'], silent=True) | |
+ out = self.adb.shell(["ps"], silent=True) | |
found_browsers = [] | |
- all_browsers = self.config['all'] | |
+ all_browsers = self.config["all"] | |
for line in out.splitlines(): | |
for name in all_browsers: | |
browser_info = all_browsers[name] | |
- if name not in found_browsers and 'package' in browser_info and \ | |
- line.find(browser_info['package']) >= 0: | |
+ if ( | |
+ name not in found_browsers | |
+ and "package" in browser_info | |
+ and line.find(browser_info["package"]) >= 0 | |
+ ): | |
found_browsers.append(name) | |
if len(found_browsers): | |
for name in found_browsers: | |
- package = all_browsers[name]['package'] | |
- self.adb.shell(['am', 'force-stop', package]) | |
+ package = all_browsers[name]["package"] | |
+ self.adb.shell(["am", "force-stop", package]) | |
def execute_js(self, _script): | |
"""Run javascipt (stub for overriding""" | |
return None | |
def prepare_script_for_record(self, script): | |
"""Convert a script command into one that first removes the orange frame""" | |
- return "(function() {" \ | |
- "var wptDiv = document.getElementById('wptorange');" \ | |
- "if(wptDiv) {wptDiv.parentNode.removeChild(wptDiv);}" \ | |
- "window.requestAnimationFrame(function(){" \ | |
- "window.requestAnimationFrame(function(){" + script + "});"\ | |
- "});" \ | |
- "})();" | |
+ return ( | |
+ "(function() {" | |
+ "var wptDiv = document.getElementById('wptorange');" | |
+ "if(wptDiv) {wptDiv.parentNode.removeChild(wptDiv);}" | |
+ "window.requestAnimationFrame(function(){" | |
+ "window.requestAnimationFrame(function(){" + script + "});" | |
+ "});" | |
+ "})();" | |
+ ) | |
def on_start_recording(self, task): | |
"""Notification that we are about to start an operation that needs to be recorded""" | |
- if task['log_data']: | |
- task['page_data']['osVersion'] = self.adb.version | |
- task['page_data']['os_version'] = self.adb.version | |
- version = self.adb.get_package_version(self.config['package']) | |
+ if task["log_data"]: | |
+ task["page_data"]["osVersion"] = self.adb.version | |
+ task["page_data"]["os_version"] = self.adb.version | |
+ version = self.adb.get_package_version(self.config["package"]) | |
if version is not None: | |
- task['page_data']['browserVersion'] = version | |
- task['page_data']['browser_version'] = version | |
- if not self.job['shaper'].configure(self.job, task): | |
- task['error'] = "Error configuring traffic-shaping" | |
- task['page_data']['result'] = 12999 | |
+ task["page_data"]["browserVersion"] = version | |
+ task["page_data"]["browser_version"] = version | |
+ if not self.job["shaper"].configure(self.job, task): | |
+ task["error"] = "Error configuring traffic-shaping" | |
+ task["page_data"]["result"] = 12999 | |
if self.tcpdump_enabled: | |
self.adb.start_tcpdump() | |
- if self.video_enabled and not self.job['disable_video']: | |
- if task['navigated']: | |
+ if self.video_enabled and not self.job["disable_video"]: | |
+ if task["navigated"]: | |
self.execute_js(SET_ORANGE) | |
time.sleep(0.5) | |
- logging.debug('Starting video capture...') | |
+ logging.debug("Starting video capture...") | |
self.adb.start_screenrecord() | |
if self.tcpdump_enabled or self.video_enabled: | |
time.sleep(2) | |
def on_stop_capture(self, task): | |
@@ -162,138 +178,181 @@ | |
pass | |
def on_stop_recording(self, task): | |
"""Notification that we are done with an operation that needs to be recorded""" | |
if self.tcpdump_enabled: | |
- tcpdump = os.path.join(task['dir'], task['prefix']) + '.cap' | |
+ tcpdump = os.path.join(task["dir"], task["prefix"]) + ".cap" | |
self.adb.stop_tcpdump(tcpdump) | |
- if self.video_enabled and not self.job['disable_video']: | |
- task['video_file'] = os.path.join(task['dir'], task['prefix']) + '_video.mp4' | |
- self.adb.stop_screenrecord(task['video_file']) | |
+ if self.video_enabled and not self.job["disable_video"]: | |
+ task["video_file"] = ( | |
+ os.path.join(task["dir"], task["prefix"]) + "_video.mp4" | |
+ ) | |
+ self.adb.stop_screenrecord(task["video_file"]) | |
# kick off the video processing (async) | |
- if os.path.isfile(task['video_file']): | |
- video_path = os.path.join(task['dir'], task['video_subdirectory']) | |
- support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
- if task['current_step'] == 1: | |
- filename = '{0:d}.{1:d}.histograms.json.gz'.format(task['run'], | |
- task['cached']) | |
+ if os.path.isfile(task["video_file"]): | |
+ video_path = os.path.join(task["dir"], task["video_subdirectory"]) | |
+ support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
+ if task["current_step"] == 1: | |
+ filename = "{0:d}.{1:d}.histograms.json.gz".format( | |
+ task["run"], task["cached"] | |
+ ) | |
else: | |
- filename = '{0:d}.{1:d}.{2:d}.histograms.json.gz'.format(task['run'], | |
- task['cached'], | |
- task['current_step']) | |
- histograms = os.path.join(task['dir'], filename) | |
- progress_file = os.path.join(task['dir'], task['prefix']) + \ | |
- '_visual_progress.json.gz' | |
+ filename = "{0:d}.{1:d}.{2:d}.histograms.json.gz".format( | |
+ task["run"], task["cached"], task["current_step"] | |
+ ) | |
+ histograms = os.path.join(task["dir"], filename) | |
+ progress_file = ( | |
+ os.path.join(task["dir"], task["prefix"]) | |
+ + "_visual_progress.json.gz" | |
+ ) | |
visualmetrics = os.path.join(support_path, "visualmetrics.py") | |
- args = ['python', visualmetrics, '-i', task['video_file'], | |
- '-d', video_path, '--force', '--quality', | |
- '{0:d}'.format(self.job['imageQuality']), | |
- '--viewport', '--maxframes', '50', '--histogram', histograms, | |
- '--progress', progress_file] | |
- if 'debug' in self.job and self.job['debug']: | |
- args.append('-vvvv') | |
- if 'heroElementTimes' in self.job and self.job['heroElementTimes']: | |
- hero_elements_file = os.path.join(task['dir'], task['prefix']) + '_hero_elements.json.gz' | |
- args.extend(['--herodata', hero_elements_file]) | |
- if 'renderVideo' in self.job and self.job['renderVideo']: | |
- video_out = os.path.join(task['dir'], task['prefix']) + '_rendered_video.mp4' | |
- args.extend(['--render', video_out]) | |
- if 'fullSizeVideo' in self.job and self.job['fullSizeVideo']: | |
- args.append('--full') | |
- if 'thumbsize' in self.job: | |
+ args = [ | |
+ "python", | |
+ visualmetrics, | |
+ "-i", | |
+ task["video_file"], | |
+ "-d", | |
+ video_path, | |
+ "--force", | |
+ "--quality", | |
+ "{0:d}".format(self.job["imageQuality"]), | |
+ "--viewport", | |
+ "--maxframes", | |
+ "50", | |
+ "--histogram", | |
+ histograms, | |
+ "--progress", | |
+ progress_file, | |
+ ] | |
+ if "debug" in self.job and self.job["debug"]: | |
+ args.append("-vvvv") | |
+ if "heroElementTimes" in self.job and self.job["heroElementTimes"]: | |
+ hero_elements_file = ( | |
+ os.path.join(task["dir"], task["prefix"]) | |
+ + "_hero_elements.json.gz" | |
+ ) | |
+ args.extend(["--herodata", hero_elements_file]) | |
+ if "renderVideo" in self.job and self.job["renderVideo"]: | |
+ video_out = ( | |
+ os.path.join(task["dir"], task["prefix"]) | |
+ + "_rendered_video.mp4" | |
+ ) | |
+ args.extend(["--render", video_out]) | |
+ if "fullSizeVideo" in self.job and self.job["fullSizeVideo"]: | |
+ args.append("--full") | |
+ if "thumbsize" in self.job: | |
try: | |
- thumbsize = int(self.job['thumbsize']) | |
+ thumbsize = int(self.job["thumbsize"]) | |
if thumbsize > 0 and thumbsize <= 2000: | |
- args.extend(['--thumbsize', str(thumbsize)]) | |
+ args.extend(["--thumbsize", str(thumbsize)]) | |
except Exception: | |
pass | |
- if 'videoFlags' in self.config: | |
- args.extend(self.config['videoFlags']) | |
+ if "videoFlags" in self.config: | |
+ args.extend(self.config["videoFlags"]) | |
else: | |
- args.append('--orange') | |
- logging.debug(' '.join(args)) | |
+ args.append("--orange") | |
+ logging.debug(" ".join(args)) | |
self.video_processing = subprocess.Popen(args) | |
- self.job['shaper'].reset() | |
+ self.job["shaper"].reset() | |
def on_start_processing(self, task): | |
"""Start any processing of the captured data""" | |
if self.tcpdump_enabled: | |
- tcpdump = os.path.join(task['dir'], task['prefix']) + '.cap' | |
+ tcpdump = os.path.join(task["dir"], task["prefix"]) + ".cap" | |
if os.path.isfile(tcpdump): | |
- pcap_out = tcpdump + '.gz' | |
- with open(tcpdump, 'rb') as f_in: | |
- with gzip.open(pcap_out, 'wb', 7) as f_out: | |
+ pcap_out = tcpdump + ".gz" | |
+ with open(tcpdump, "rb") as f_in: | |
+ with gzip.open(pcap_out, "wb", 7) as f_out: | |
shutil.copyfileobj(f_in, f_out) | |
if os.path.isfile(pcap_out): | |
os.remove(tcpdump) | |
self.tcpdump_file = pcap_out | |
- path_base = os.path.join(task['dir'], task['prefix']) | |
- slices_file = path_base + '_pcap_slices.json.gz' | |
- pcap_parser = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', "pcap-parser.py") | |
- cmd = ['python', pcap_parser, '--json', '-i', pcap_out, '-d', slices_file] | |
- logging.debug(' '.join(cmd)) | |
- self.tcpdump_processing = subprocess.Popen(cmd, stdout=subprocess.PIPE, | |
- stderr=subprocess.PIPE) | |
+ path_base = os.path.join(task["dir"], task["prefix"]) | |
+ slices_file = path_base + "_pcap_slices.json.gz" | |
+ pcap_parser = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "pcap-parser.py", | |
+ ) | |
+ cmd = [ | |
+ "python", | |
+ pcap_parser, | |
+ "--json", | |
+ "-i", | |
+ pcap_out, | |
+ "-d", | |
+ slices_file, | |
+ ] | |
+ logging.debug(" ".join(cmd)) | |
+ self.tcpdump_processing = subprocess.Popen( | |
+ cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE | |
+ ) | |
def wait_for_processing(self, task): | |
"""Wait for any background processing threads to finish""" | |
if self.video_processing is not None: | |
self.video_processing.communicate() | |
self.video_processing = None | |
- if not self.job['keepvideo']: | |
+ if not self.job["keepvideo"]: | |
try: | |
- os.remove(task['video_file']) | |
+ os.remove(task["video_file"]) | |
except Exception: | |
pass | |
if self.tcpdump_processing is not None: | |
try: | |
stdout, _ = self.tcpdump_processing.communicate() | |
if stdout is not None: | |
result = json.loads(stdout) | |
if result: | |
- if 'in' in result: | |
- task['page_data']['pcapBytesIn'] = result['in'] | |
- if 'out' in result: | |
- task['page_data']['pcapBytesOut'] = result['out'] | |
- if 'in_dup' in result: | |
- task['page_data']['pcapBytesInDup'] = result['in_dup'] | |
- if 'tcpdump' not in self.job or not self.job['tcpdump']: | |
+ if "in" in result: | |
+ task["page_data"]["pcapBytesIn"] = result["in"] | |
+ if "out" in result: | |
+ task["page_data"]["pcapBytesOut"] = result["out"] | |
+ if "in_dup" in result: | |
+ task["page_data"]["pcapBytesInDup"] = result["in_dup"] | |
+ if "tcpdump" not in self.job or not self.job["tcpdump"]: | |
if self.tcpdump_file is not None: | |
os.remove(self.tcpdump_file) | |
except Exception: | |
pass | |
def step_complete(self, task): | |
"""All of the processing for the current test step is complete""" | |
# Write out the accumulated page_data | |
- if task['log_data'] and task['page_data']: | |
- if 'browser' in self.job: | |
- task['page_data']['browser_name'] = self.job['browser'] | |
- if 'step_name' in task: | |
- task['page_data']['eventName'] = task['step_name'] | |
- if 'run_start_time' in task: | |
- task['page_data']['test_run_time_ms'] = \ | |
- int(round((monotonic.monotonic() - task['run_start_time']) * 1000.0)) | |
- path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') | |
- json_page_data = json.dumps(task['page_data']) | |
- logging.debug('Page Data: %s', json_page_data) | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ if task["log_data"] and task["page_data"]: | |
+ if "browser" in self.job: | |
+ task["page_data"]["browser_name"] = self.job["browser"] | |
+ if "step_name" in task: | |
+ task["page_data"]["eventName"] = task["step_name"] | |
+ if "run_start_time" in task: | |
+ task["page_data"]["test_run_time_ms"] = int( | |
+ round((monotonic.monotonic() - task["run_start_time"]) * 1000.0) | |
+ ) | |
+ path = os.path.join(task["dir"], task["prefix"] + "_page_data.json.gz") | |
+ json_page_data = json.dumps(task["page_data"]) | |
+ logging.debug("Page Data: %s", json_page_data) | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json_page_data) | |
def screenshot(self, task): | |
"""Grab a screenshot using adb""" | |
- png_file = os.path.join(task['dir'], task['prefix'] + '_screen.png') | |
- self.adb.screenshot(png_file, self.job['image_magick']['mogrify']) | |
- task['page_data']['result'] = 0 | |
- task['page_data']['visualTest'] = 1 | |
+ png_file = os.path.join(task["dir"], task["prefix"] + "_screen.png") | |
+ self.adb.screenshot(png_file, self.job["image_magick"]["mogrify"]) | |
+ task["page_data"]["result"] = 0 | |
+ task["page_data"]["visualTest"] = 1 | |
if os.path.isfile(png_file): | |
- if not self.job['pngScreenShot']: | |
- jpeg_file = os.path.join(task['dir'], task['prefix'] + '_screen.jpg') | |
+ if not self.job["pngScreenShot"]: | |
+ jpeg_file = os.path.join(task["dir"], task["prefix"] + "_screen.jpg") | |
command = '{0} "{1}" -resize {2:d}x{2:d} -quality {3:d} "{4}"'.format( | |
- self.job['image_magick']['convert'], | |
- png_file, 600, self.job['imageQuality'], jpeg_file) | |
+ self.job["image_magick"]["convert"], | |
+ png_file, | |
+ 600, | |
+ self.job["imageQuality"], | |
+ jpeg_file, | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
if os.path.isfile(jpeg_file): | |
try: | |
os.remove(png_file) | |
--- internal/message_server.py 2018-12-10 23:23:21.954837 +0000 | |
+++ internal/message_server.py 2019-02-06 17:08:26.530018 +0000 | |
@@ -31,43 +31,48 @@ | |
</html>""" | |
class TornadoRequestHandler(tornado.web.RequestHandler): | |
"""Request handler for when we are using tornado""" | |
+ | |
def get(self): | |
"""Handle GET requests""" | |
import ujson as json | |
+ | |
response = None | |
- content_type = 'text/plain' | |
- if self.request.uri == '/ping': | |
- response = 'pong' | |
- elif self.request.uri == '/blank.html': | |
- content_type = 'text/html' | |
+ content_type = "text/plain" | |
+ if self.request.uri == "/ping": | |
+ response = "pong" | |
+ elif self.request.uri == "/blank.html": | |
+ content_type = "text/html" | |
response = BLANK_PAGE | |
- elif self.request.uri == '/orange.html': | |
- content_type = 'text/html' | |
+ elif self.request.uri == "/orange.html": | |
+ content_type = "text/html" | |
response = ORANGE_PAGE | |
- elif self.request.uri == '/config': | |
+ elif self.request.uri == "/config": | |
# JSON config data | |
- content_type = 'application/json' | |
- response = '{}' | |
+ content_type = "application/json" | |
+ response = "{}" | |
if MESSAGE_SERVER.config is not None: | |
response = json.dumps(MESSAGE_SERVER.config) | |
- elif self.request.uri == '/config.html': | |
+ elif self.request.uri == "/config.html": | |
# Orange HTML page that can be queried from the extension for config data | |
- content_type = 'text/html' | |
+ content_type = "text/html" | |
response = "<html><head>\n" | |
response += "<style>\n" | |
response += "body {background-color: white; margin: 0;}\n" | |
- response += "#wptorange {width:100%; height: 100%; background-color: #DE640D;}\n" | |
+ response += ( | |
+ "#wptorange {width:100%; height: 100%; background-color: #DE640D;}\n" | |
+ ) | |
response += "</style>\n" | |
response += "</head><body><div id='wptorange'></div>\n" | |
if MESSAGE_SERVER.config is not None: | |
import cgi | |
+ | |
response += '<div id="wptagentConfig" style="display: none;">' | |
response += cgi.escape(json.dumps(MESSAGE_SERVER.config)) | |
- response += '</div>' | |
+ response += "</div>" | |
response += "</body></html>" | |
if response is not None: | |
self.set_status(200) | |
self.set_header("Content-Type", content_type) | |
@@ -75,30 +80,32 @@ | |
self.write(response) | |
def post(self): | |
"""Handle POST messages""" | |
import ujson as json | |
+ | |
try: | |
messages = self.request.body | |
if messages is not None and len(messages): | |
- if self.request.uri == '/log': | |
+ if self.request.uri == "/log": | |
logging.debug(messages) | |
else: | |
for line in messages.splitlines(): | |
line = line.strip() | |
if len(line): | |
message = json.loads(line) | |
- if 'body' not in message and self.request.uri != '/etw': | |
- message['body'] = None | |
+ if "body" not in message and self.request.uri != "/etw": | |
+ message["body"] = None | |
MESSAGE_SERVER.handle_message(message) | |
except Exception: | |
pass | |
self.set_status(200) | |
class MessageServer(object): | |
"""Local HTTP server for interacting with the extension""" | |
+ | |
def __init__(self): | |
global MESSAGE_SERVER | |
MESSAGE_SERVER = self | |
self.thread = None | |
self.messages = JoinableQueue() | |
@@ -145,27 +152,30 @@ | |
def is_ok(self): | |
"""Check that the server is responding and restart it if necessary""" | |
import requests | |
import monotonic | |
+ | |
end_time = monotonic.monotonic() + 30 | |
server_ok = False | |
proxies = {"http": None, "https": None} | |
while not server_ok and monotonic.monotonic() < end_time: | |
try: | |
- response = requests.get('http://127.0.0.1:8888/ping', timeout=10, proxies=proxies) | |
- if response.text == 'pong': | |
+ response = requests.get( | |
+ "http://127.0.0.1:8888/ping", timeout=10, proxies=proxies | |
+ ) | |
+ if response.text == "pong": | |
server_ok = True | |
except Exception: | |
pass | |
if not server_ok: | |
time.sleep(5) | |
return server_ok | |
def run(self): | |
"""Main server loop""" | |
- logging.debug('Starting extension server on port 8888') | |
+ logging.debug("Starting extension server on port 8888") | |
application = tornado.web.Application([(r"/.*", TornadoRequestHandler)]) | |
- application.listen(8888, '127.0.0.1') | |
+ application.listen(8888, "127.0.0.1") | |
self.__is_started.set() | |
tornado.ioloop.IOLoop.instance().start() | |
--- internal/os_util.py 2018-10-30 23:38:19.853614 +0000 | |
+++ internal/os_util.py 2019-02-06 17:08:26.778057 +0000 | |
@@ -5,113 +5,132 @@ | |
import logging | |
import os | |
import platform | |
import subprocess | |
+ | |
def kill_all(exe, force, timeout=30): | |
"""Terminate all instances of the given process""" | |
logging.debug("Terminating all instances of %s", exe) | |
plat = platform.system() | |
if plat == "Windows": | |
if force: | |
- subprocess.call(['taskkill', '/F', '/T', '/IM', exe]) | |
+ subprocess.call(["taskkill", "/F", "/T", "/IM", exe]) | |
else: | |
- subprocess.call(['taskkill', '/IM', exe]) | |
+ subprocess.call(["taskkill", "/IM", exe]) | |
elif plat == "Linux" or plat == "Darwin": | |
if force: | |
- subprocess.call(['killall', '-s', 'SIGKILL', exe]) | |
+ subprocess.call(["killall", "-s", "SIGKILL", exe]) | |
else: | |
- subprocess.call(['killall', exe]) | |
+ subprocess.call(["killall", exe]) | |
wait_for_all(exe, timeout) | |
+ | |
def wait_for_all(exe, timeout=30): | |
"""Wait for the given process to exit""" | |
import psutil | |
+ | |
processes = [] | |
for proc in psutil.process_iter(): | |
try: | |
- pinfo = proc.as_dict(attrs=['pid', 'name', 'exe']) | |
+ pinfo = proc.as_dict(attrs=["pid", "name", "exe"]) | |
except psutil.NoSuchProcess: | |
pass | |
else: | |
- if 'exe' in pinfo and pinfo['exe'] is not None and\ | |
- os.path.basename(pinfo['exe']) == exe: | |
+ if ( | |
+ "exe" in pinfo | |
+ and pinfo["exe"] is not None | |
+ and os.path.basename(pinfo["exe"]) == exe | |
+ ): | |
processes.append(proc) | |
if len(processes): | |
logging.debug("Waiting up to %d seconds for %s to exit", timeout, exe) | |
psutil.wait_procs(processes, timeout=timeout) | |
+ | |
def flush_dns(): | |
"""Flush the OS DNS resolver""" | |
logging.debug("Flushing DNS") | |
plat = platform.system() | |
if plat == "Windows": | |
- run_elevated('ipconfig', '/flushdns') | |
+ run_elevated("ipconfig", "/flushdns") | |
elif plat == "Darwin": | |
- subprocess.call(['sudo', 'killall', '-HUP', 'mDNSResponder']) | |
- subprocess.call(['sudo', 'dscacheutil', '-flushcache']) | |
- subprocess.call(['sudo', 'lookupd', '-flushcache']) | |
+ subprocess.call(["sudo", "killall", "-HUP", "mDNSResponder"]) | |
+ subprocess.call(["sudo", "dscacheutil", "-flushcache"]) | |
+ subprocess.call(["sudo", "lookupd", "-flushcache"]) | |
elif plat == "Linux": | |
- subprocess.call(['sudo', 'service', 'dnsmasq', 'restart']) | |
- subprocess.call(['sudo', 'rndc', 'restart']) | |
- subprocess.call(['sudo', 'systemd-resolve', '--flush-caches']) | |
+ subprocess.call(["sudo", "service", "dnsmasq", "restart"]) | |
+ subprocess.call(["sudo", "rndc", "restart"]) | |
+ subprocess.call(["sudo", "systemd-resolve", "--flush-caches"]) | |
+ | |
# pylint: disable=E0611,E0401 | |
def run_elevated(command, args, wait=True): | |
"""Run the given command as an elevated user and wait for it to return""" | |
ret = 1 | |
try: | |
- if command.find(' ') > -1: | |
+ if command.find(" ") > -1: | |
command = '"' + command + '"' | |
- if platform.system() == 'Windows': | |
+ if platform.system() == "Windows": | |
import win32api | |
import win32con | |
import win32event | |
import win32process | |
from win32com.shell.shell import ShellExecuteEx | |
from win32com.shell import shellcon | |
- logging.debug(command + ' ' + args) | |
- process_info = ShellExecuteEx(nShow=win32con.SW_HIDE, | |
- fMask=shellcon.SEE_MASK_NOCLOSEPROCESS, | |
- lpVerb='runas', | |
- lpFile=command, | |
- lpParameters=args) | |
+ | |
+ logging.debug(command + " " + args) | |
+ process_info = ShellExecuteEx( | |
+ nShow=win32con.SW_HIDE, | |
+ fMask=shellcon.SEE_MASK_NOCLOSEPROCESS, | |
+ lpVerb="runas", | |
+ lpFile=command, | |
+ lpParameters=args, | |
+ ) | |
if wait: | |
- win32event.WaitForSingleObject(process_info['hProcess'], 600000) | |
- ret = win32process.GetExitCodeProcess(process_info['hProcess']) | |
- win32api.CloseHandle(process_info['hProcess']) | |
+ win32event.WaitForSingleObject(process_info["hProcess"], 600000) | |
+ ret = win32process.GetExitCodeProcess(process_info["hProcess"]) | |
+ win32api.CloseHandle(process_info["hProcess"]) | |
else: | |
ret = process_info | |
else: | |
- logging.debug('sudo ' + command + ' ' + args) | |
- ret = subprocess.call('sudo ' + command + ' ' + args, shell=True) | |
+ logging.debug("sudo " + command + " " + args) | |
+ ret = subprocess.call("sudo " + command + " " + args, shell=True) | |
except Exception: | |
pass | |
return ret | |
+ | |
def wait_for_elevated_process(process_info): | |
- if platform.system() == 'Windows' and 'hProcess' in process_info: | |
+ if platform.system() == "Windows" and "hProcess" in process_info: | |
import win32api | |
import win32con | |
import win32event | |
import win32process | |
- win32event.WaitForSingleObject(process_info['hProcess'], 600000) | |
- ret = win32process.GetExitCodeProcess(process_info['hProcess']) | |
- win32api.CloseHandle(process_info['hProcess']) | |
+ | |
+ win32event.WaitForSingleObject(process_info["hProcess"], 600000) | |
+ ret = win32process.GetExitCodeProcess(process_info["hProcess"]) | |
+ win32api.CloseHandle(process_info["hProcess"]) | |
return ret | |
+ | |
+ | |
# pylint: enable=E0611,E0401 | |
# pylint: disable=E1101 | |
def get_free_disk_space(): | |
"""Return the number of bytes free on the given disk in Gigabytes (floating)""" | |
path = os.path.dirname(os.path.realpath(__file__)) | |
- if platform.system() == 'Windows': | |
+ if platform.system() == "Windows": | |
import ctypes | |
+ | |
free_bytes = ctypes.c_ulonglong(0) | |
- ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(path), | |
- None, None, ctypes.pointer(free_bytes)) | |
+ ctypes.windll.kernel32.GetDiskFreeSpaceExW( | |
+ ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes) | |
+ ) | |
return float(free_bytes.value / 1024 / 1024) / 1024.0 | |
else: | |
stat = os.statvfs(path) | |
return float(stat.f_bavail * stat.f_frsize / 1024 / 1024) / 1024.0 | |
+ | |
+ | |
# pylint: enable=E1101 | |
--- internal/desktop_browser.py 2019-01-16 18:58:11.362877 +0000 | |
+++ internal/desktop_browser.py 2019-02-06 17:08:27.194298 +0000 | |
@@ -15,28 +15,31 @@ | |
import time | |
import monotonic | |
import ujson as json | |
from .base_browser import BaseBrowser | |
-SET_ORANGE = "(function() {" \ | |
- "var wptDiv = document.getElementById('wptorange');" \ | |
- "if (!wptDiv) {" \ | |
- "wptDiv = document.createElement('div');" \ | |
- "wptDiv.id = 'wptorange';" \ | |
- "wptDiv.style.position = 'absolute';" \ | |
- "wptDiv.style.top = '0';" \ | |
- "wptDiv.style.left = '0';" \ | |
- "wptDiv.style.width = Math.max(document.documentElement.clientWidth, document.body.clientWidth || 0, window.clientWidth || 0) + 'px';" \ | |
- "wptDiv.style.height = Math.max(document.documentElement.clientHeight, document.body.clientHeight || 0, window.innerHeight || 0) + 'px';" \ | |
- "wptDiv.style.zIndex = '2147483647';" \ | |
- "wptDiv.style.backgroundColor = '#DE640D';" \ | |
- "document.body.appendChild(wptDiv);" \ | |
- "}})();" | |
+SET_ORANGE = ( | |
+ "(function() {" | |
+ "var wptDiv = document.getElementById('wptorange');" | |
+ "if (!wptDiv) {" | |
+ "wptDiv = document.createElement('div');" | |
+ "wptDiv.id = 'wptorange';" | |
+ "wptDiv.style.position = 'absolute';" | |
+ "wptDiv.style.top = '0';" | |
+ "wptDiv.style.left = '0';" | |
+ "wptDiv.style.width = Math.max(document.documentElement.clientWidth, document.body.clientWidth || 0, window.clientWidth || 0) + 'px';" | |
+ "wptDiv.style.height = Math.max(document.documentElement.clientHeight, document.body.clientHeight || 0, window.innerHeight || 0) + 'px';" | |
+ "wptDiv.style.zIndex = '2147483647';" | |
+ "wptDiv.style.backgroundColor = '#DE640D';" | |
+ "document.body.appendChild(wptDiv);" | |
+ "}})();" | |
+) | |
class DesktopBrowser(BaseBrowser): | |
"""Desktop Browser base""" | |
+ | |
START_BROWSER_TIME_LIMIT = 30 | |
def __init__(self, path, options, job): | |
BaseBrowser.__init__(self) | |
self.path = path | |
@@ -46,11 +49,11 @@ | |
self.usage_queue = None | |
self.thread = None | |
self.cleanup_thread = None | |
self.options = options | |
self.interfaces = None | |
- self.tcpdump_enabled = bool('tcpdump' in job and job['tcpdump']) | |
+ self.tcpdump_enabled = bool("tcpdump" in job and job["tcpdump"]) | |
self.tcpdump = None | |
self.ffmpeg = None | |
self.video_capture_running = False | |
self.video_processing = None | |
self.pcap_file = None | |
@@ -61,151 +64,185 @@ | |
self.screen_width = 1920 | |
self.screen_height = 1200 | |
self.device_pixel_ratio = None | |
self.stopping = False | |
self.is_chrome = False | |
- self.support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
+ self.support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
def prepare(self, job, task): | |
"""Prepare the profile/OS for the browser""" | |
self.stopping = False | |
self.task = task | |
self.find_default_interface() | |
- if platform.system() == 'Windows': | |
+ if platform.system() == "Windows": | |
self.prepare_windows() | |
self.cleanup_thread = threading.Thread(target=self.background_cleanup) | |
self.cleanup_thread.daemon = True | |
self.cleanup_thread.start() | |
if self.tcpdump_enabled: | |
- os.environ["SSLKEYLOGFILE"] = os.path.join(task['dir'], task['prefix']) + '_keylog.log' | |
+ os.environ["SSLKEYLOGFILE"] = ( | |
+ os.path.join(task["dir"], task["prefix"]) + "_keylog.log" | |
+ ) | |
else: | |
- os.environ["SSLKEYLOGFILE"] = '' | |
+ os.environ["SSLKEYLOGFILE"] = "" | |
try: | |
from .os_util import kill_all | |
from .os_util import flush_dns | |
+ | |
logging.debug("Preparing browser") | |
kill_all(os.path.basename(self.path), True) | |
- if 'browser_info' in job and 'other_exes' in job['browser_info']: | |
- for exe in job['browser_info']['other_exes']: | |
+ if "browser_info" in job and "other_exes" in job["browser_info"]: | |
+ for exe in job["browser_info"]["other_exes"]: | |
kill_all(exe, True) | |
- if self.options.shaper is None or self.options.shaper != 'none': | |
+ if self.options.shaper is None or self.options.shaper != "none": | |
flush_dns() | |
- if 'profile' in task: | |
- if not task['cached'] and os.path.isdir(task['profile']): | |
- logging.debug("Clearing profile %s", task['profile']) | |
- shutil.rmtree(task['profile']) | |
- if not os.path.isdir(task['profile']): | |
- os.makedirs(task['profile']) | |
+ if "profile" in task: | |
+ if not task["cached"] and os.path.isdir(task["profile"]): | |
+ logging.debug("Clearing profile %s", task["profile"]) | |
+ shutil.rmtree(task["profile"]) | |
+ if not os.path.isdir(task["profile"]): | |
+ os.makedirs(task["profile"]) | |
except Exception as err: | |
logging.exception("Exception preparing Browser: %s", err.__str__()) | |
# Modify the hosts file for non-Chrome browsers | |
self.restore_hosts() | |
- if not self.is_chrome and 'dns_override' in task: | |
- self.modify_hosts(task, task['dns_override']) | |
+ if not self.is_chrome and "dns_override" in task: | |
+ self.modify_hosts(task, task["dns_override"]) | |
def modify_hosts(self, task, hosts): | |
"""Add entries to the system's hosts file (non-Windows currently)""" | |
- hosts_backup = os.path.join(os.path.abspath(os.path.dirname(__file__)), "hosts.backup") | |
- hosts_tmp = os.path.join(task['dir'], "hosts.wpt") | |
- hosts_file = '/etc/hosts' | |
- if len(hosts) and platform.system() != 'Windows': | |
- logging.debug('Modifying hosts file:') | |
+ hosts_backup = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "hosts.backup" | |
+ ) | |
+ hosts_tmp = os.path.join(task["dir"], "hosts.wpt") | |
+ hosts_file = "/etc/hosts" | |
+ if len(hosts) and platform.system() != "Windows": | |
+ logging.debug("Modifying hosts file:") | |
try: | |
hosts_text = None | |
- with open(hosts_file, 'r') as f_in: | |
+ with open(hosts_file, "r") as f_in: | |
hosts_text = f_in.read() | |
if hosts_text is not None: | |
hosts_text += "\n" | |
for pair in hosts: | |
hosts_text += "{0} {1}\n".format(pair[1], pair[0]) | |
- with open(hosts_tmp, 'w') as f_out: | |
+ with open(hosts_tmp, "w") as f_out: | |
f_out.write(hosts_text) | |
- subprocess.call(['sudo', 'cp', hosts_file, hosts_backup]) | |
- subprocess.call(['sudo', 'cp', hosts_tmp, hosts_file]) | |
+ subprocess.call(["sudo", "cp", hosts_file, hosts_backup]) | |
+ subprocess.call(["sudo", "cp", hosts_tmp, hosts_file]) | |
os.unlink(hosts_tmp) | |
logging.debug(hosts_text) | |
except Exception as err: | |
logging.exception("Exception modifying hosts file: %s", err.__str__()) | |
def restore_hosts(self): | |
"""See if we have a backup hosts file to restore""" | |
- hosts_backup = os.path.join(os.path.abspath(os.path.dirname(__file__)), "hosts.backup") | |
- hosts_file = '/etc/hosts' | |
- if os.path.isfile(hosts_backup) and platform.system() != 'Windows': | |
- logging.debug('Restoring backup of hosts file') | |
- subprocess.call(['sudo', 'cp', hosts_backup, hosts_file]) | |
- subprocess.call(['sudo', 'rm', hosts_backup]) | |
+ hosts_backup = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "hosts.backup" | |
+ ) | |
+ hosts_file = "/etc/hosts" | |
+ if os.path.isfile(hosts_backup) and platform.system() != "Windows": | |
+ logging.debug("Restoring backup of hosts file") | |
+ subprocess.call(["sudo", "cp", hosts_backup, hosts_file]) | |
+ subprocess.call(["sudo", "rm", hosts_backup]) | |
# pylint: disable=E0611,E0401,E1101 | |
def close_top_window(self, hwnd, _): | |
"""Close all top-level windows""" | |
- keep_titles = ['Start'] | |
- keep_classes = ['ConsoleWindowClass', 'Windows.UI.Core.CoreWindow'] | |
- keep_exes = ['explorer.exe', 'cmd.exe', 'teamviewer.exe'] | |
+ keep_titles = ["Start"] | |
+ keep_classes = ["ConsoleWindowClass", "Windows.UI.Core.CoreWindow"] | |
+ keep_exes = ["explorer.exe", "cmd.exe", "teamviewer.exe"] | |
try: | |
import win32api | |
import win32con | |
import win32event | |
import win32gui | |
import win32process | |
import psutil | |
+ | |
if win32gui.IsWindowVisible(hwnd): | |
window_title = win32gui.GetWindowText(hwnd) | |
window_class = win32gui.GetClassName(hwnd) | |
_, proccess_id = win32process.GetWindowThreadProcessId(hwnd) | |
exe = os.path.basename(psutil.Process(proccess_id).exe()).lower() | |
- if len(window_title) and \ | |
- window_title not in keep_titles and \ | |
- window_class not in keep_classes and \ | |
- exe not in keep_exes: | |
+ if ( | |
+ len(window_title) | |
+ and window_title not in keep_titles | |
+ and window_class not in keep_classes | |
+ and exe not in keep_exes | |
+ ): | |
placement = win32gui.GetWindowPlacement(hwnd) | |
left, top, right, bottom = win32gui.GetWindowRect(hwnd) | |
width = abs(right - left) | |
height = abs(bottom - top) | |
- if width > 0 and height > 0 and \ | |
- top >= 0 and left >= 0 and \ | |
- placement[1] != win32con.SW_SHOWMINIMIZED and \ | |
- placement[1] != win32con.SW_MINIMIZE and \ | |
- placement[1] != win32con.SW_FORCEMINIMIZE: | |
- logging.debug("Closing Window: %s (%s) : %d,%d %dx%d : %d - %s", | |
- window_title, window_class, left, top, width, height, | |
- placement[1], exe) | |
+ if ( | |
+ width > 0 | |
+ and height > 0 | |
+ and top >= 0 | |
+ and left >= 0 | |
+ and placement[1] != win32con.SW_SHOWMINIMIZED | |
+ and placement[1] != win32con.SW_MINIMIZE | |
+ and placement[1] != win32con.SW_FORCEMINIMIZE | |
+ ): | |
+ logging.debug( | |
+ "Closing Window: %s (%s) : %d,%d %dx%d : %d - %s", | |
+ window_title, | |
+ window_class, | |
+ left, | |
+ top, | |
+ width, | |
+ height, | |
+ placement[1], | |
+ exe, | |
+ ) | |
handle = win32api.OpenProcess( | |
- win32con.PROCESS_TERMINATE | win32con.SYNCHRONIZE | | |
- win32con.PROCESS_QUERY_INFORMATION, | |
- 0, proccess_id) | |
+ win32con.PROCESS_TERMINATE | |
+ | win32con.SYNCHRONIZE | |
+ | win32con.PROCESS_QUERY_INFORMATION, | |
+ 0, | |
+ proccess_id, | |
+ ) | |
win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) | |
if handle: | |
result = win32event.WaitForSingleObject(handle, 10000) | |
if result == win32event.WAIT_TIMEOUT: | |
- logging.debug("Terminating process for: %s (%s)", | |
- window_title, window_class) | |
+ logging.debug( | |
+ "Terminating process for: %s (%s)", | |
+ window_title, | |
+ window_class, | |
+ ) | |
win32api.TerminateProcess(handle, 0) | |
win32api.CloseHandle(handle) | |
except Exception as err: | |
pass | |
def close_top_dialog(self, hwnd, _): | |
"""Close all top-level dialogs""" | |
close_classes = ["#32770", "Notepad", "Internet Explorer_Server"] | |
- keep_titles = ['Delete Browsing History', 'Shut Down Windows', 'TeamViewer'] | |
+ keep_titles = ["Delete Browsing History", "Shut Down Windows", "TeamViewer"] | |
try: | |
import win32gui | |
import win32con | |
+ | |
if win32gui.IsWindowVisible(hwnd): | |
window_title = win32gui.GetWindowText(hwnd) | |
window_class = win32gui.GetClassName(hwnd) | |
if window_class in close_classes and window_title not in keep_titles: | |
- logging.debug("Closing Window/Dialog: %s (%s)", window_title, window_class) | |
+ logging.debug( | |
+ "Closing Window/Dialog: %s (%s)", window_title, window_class | |
+ ) | |
win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) | |
except Exception as err: | |
logging.exception("Exception closing window: %s", err.__str__()) | |
def close_dialogs(self): | |
"""Send a close message to any top-level dialogs""" | |
try: | |
import win32gui | |
+ | |
win32gui.EnumWindows(self.close_top_dialog, None) | |
except Exception: | |
pass | |
def background_cleanup(self): | |
@@ -217,37 +254,46 @@ | |
def prepare_windows(self): | |
"""Do Windows-specific cleanup and prep""" | |
try: | |
from .os_util import kill_all | |
import win32gui | |
+ | |
kill_all("WerFault.exe", True) | |
win32gui.EnumWindows(self.close_top_window, None) | |
except Exception: | |
pass | |
+ | |
# pylint: enable=E0611,E0401,E1101 | |
def find_default_interface(self): | |
"""Look through the list of interfaces for the non-loopback interface""" | |
import psutil | |
+ | |
try: | |
if self.interfaces is None: | |
self.interfaces = {} | |
# Look to see which interfaces are up | |
stats = psutil.net_if_stats() | |
for interface in stats: | |
- if interface != 'lo' and interface[:3] != 'ifb' and stats[interface].isup: | |
- self.interfaces[interface] = {'packets': 0} | |
+ if ( | |
+ interface != "lo" | |
+ and interface[:3] != "ifb" | |
+ and stats[interface].isup | |
+ ): | |
+ self.interfaces[interface] = {"packets": 0} | |
if len(self.interfaces) > 1: | |
# See which interfaces have received data | |
cnt = psutil.net_io_counters(True) | |
for interface in cnt: | |
if interface in self.interfaces: | |
- self.interfaces[interface]['packets'] = \ | |
- cnt[interface].packets_sent + cnt[interface].packets_recv | |
+ self.interfaces[interface]["packets"] = ( | |
+ cnt[interface].packets_sent | |
+ + cnt[interface].packets_recv | |
+ ) | |
remove = [] | |
for interface in self.interfaces: | |
- if self.interfaces[interface]['packets'] == 0: | |
+ if self.interfaces[interface]["packets"] == 0: | |
remove.append(interface) | |
if len(remove): | |
for interface in remove: | |
del self.interfaces[interface] | |
if len(self.interfaces) > 1: | |
@@ -255,11 +301,11 @@ | |
remove = [] | |
addresses = psutil.net_if_addrs() | |
for interface in addresses: | |
if interface in self.interfaces: | |
for address in addresses[interface]: | |
- if address.address == '127.0.0.1': | |
+ if address.address == "127.0.0.1": | |
remove.append(interface) | |
break | |
if len(remove): | |
for interface in remove: | |
del self.interfaces[interface] | |
@@ -268,26 +314,27 @@ | |
def launch_browser(self, command_line): | |
"""Launch the browser and keep track of the process""" | |
command_line = self.enable_cpu_throttling(command_line) | |
logging.debug(command_line) | |
- if platform.system() == 'Windows': | |
+ if platform.system() == "Windows": | |
self.proc = subprocess.Popen(command_line, shell=True) | |
else: | |
self.proc = subprocess.Popen(command_line, preexec_fn=os.setsid, shell=True) | |
def close_browser(self, job, _task): | |
"""Terminate the browser but don't do all of the cleanup that stop does""" | |
if self.proc: | |
logging.debug("Closing browser") | |
from .os_util import kill_all | |
+ | |
kill_all(os.path.basename(self.path), False) | |
- if 'browser_info' in job and 'other_exes' in job['browser_info']: | |
- for exe in job['browser_info']['other_exes']: | |
+ if "browser_info" in job and "other_exes" in job["browser_info"]: | |
+ for exe in job["browser_info"]["other_exes"]: | |
kill_all(exe, False) | |
try: | |
- if platform.system() != 'Windows': | |
+ if platform.system() != "Windows": | |
os.killpg(os.getpgid(self.proc.pid), signal.SIGTERM) | |
self.proc.terminate() | |
self.proc.kill() | |
except Exception: | |
pass | |
@@ -300,12 +347,12 @@ | |
logging.debug("Stopping browser") | |
self.close_browser(job, task) | |
self.disable_cpu_throttling() | |
self.restore_hosts() | |
# Clean up the downloads folder in case anything was downloaded | |
- if platform.system() == 'Linux': | |
- downloads = os.path.expanduser('~/Downloads') | |
+ if platform.system() == "Linux": | |
+ downloads = os.path.expanduser("~/Downloads") | |
if os.path.isdir(downloads): | |
try: | |
shutil.rmtree(downloads) | |
os.makedirs(downloads) | |
except Exception: | |
@@ -318,14 +365,15 @@ | |
self.thread = None | |
def wait_for_idle(self): | |
"""Wait for no more than 50% of a single core used for 500ms""" | |
import psutil | |
+ | |
logging.debug("Waiting for Idle...") | |
cpu_count = psutil.cpu_count() | |
if cpu_count > 0: | |
- target_pct = 50. / float(cpu_count) | |
+ target_pct = 50.0 / float(cpu_count) | |
idle_start = None | |
end_time = monotonic.monotonic() + self.START_BROWSER_TIME_LIMIT | |
idle = False | |
while not idle and monotonic.monotonic() < end_time: | |
check_start = monotonic.monotonic() | |
@@ -338,137 +386,197 @@ | |
else: | |
idle_start = None | |
def clear_profile(self, task): | |
"""Delete the browser profile directory""" | |
- if os.path.isdir(task['profile']): | |
+ if os.path.isdir(task["profile"]): | |
end_time = monotonic.monotonic() + 30 | |
while monotonic.monotonic() < end_time: | |
try: | |
- shutil.rmtree(task['profile']) | |
+ shutil.rmtree(task["profile"]) | |
except Exception: | |
pass | |
- if os.path.isdir(task['profile']): | |
+ if os.path.isdir(task["profile"]): | |
time.sleep(0.1) | |
else: | |
break | |
def execute_js(self, _): | |
"""Run javascipt (stub for overriding""" | |
return None | |
def prepare_script_for_record(self, script): | |
"""Convert a script command into one that first removes the orange frame""" | |
- return "(function() {" \ | |
- "var wptDiv = document.getElementById('wptorange');" \ | |
- "if(wptDiv) {wptDiv.parentNode.removeChild(wptDiv);}" \ | |
- "window.requestAnimationFrame(function(){" \ | |
- "window.requestAnimationFrame(function(){" + script + "});"\ | |
- "});" \ | |
- "})();" | |
+ return ( | |
+ "(function() {" | |
+ "var wptDiv = document.getElementById('wptorange');" | |
+ "if(wptDiv) {wptDiv.parentNode.removeChild(wptDiv);}" | |
+ "window.requestAnimationFrame(function(){" | |
+ "window.requestAnimationFrame(function(){" + script + "});" | |
+ "});" | |
+ "})();" | |
+ ) | |
def on_start_recording(self, task): | |
"""Notification that we are about to start an operation that needs to be recorded""" | |
import psutil | |
+ | |
if self.device_pixel_ratio is None: | |
self.device_pixel_ratio = 1.0 | |
try: | |
- ratio = self.execute_js('window.devicePixelRatio') | |
+ ratio = self.execute_js("window.devicePixelRatio") | |
if ratio is not None: | |
self.device_pixel_ratio = max(1.0, float(ratio)) | |
except Exception: | |
pass | |
- if task['log_data']: | |
- if not self.job['shaper'].configure(self.job, task): | |
- self.task['error'] = "Error configuring traffic-shaping" | |
+ if task["log_data"]: | |
+ if not self.job["shaper"].configure(self.job, task): | |
+ self.task["error"] = "Error configuring traffic-shaping" | |
self.cpu_start = psutil.cpu_times() | |
self.recording = True | |
ver = platform.uname() | |
- task['page_data']['osVersion'] = '{0} {1}'.format(ver[0], ver[2]) | |
- task['page_data']['os_version'] = '{0} {1}'.format(ver[0], ver[2]) | |
+ task["page_data"]["osVersion"] = "{0} {1}".format(ver[0], ver[2]) | |
+ task["page_data"]["os_version"] = "{0} {1}".format(ver[0], ver[2]) | |
# Spawn tcpdump | |
if self.tcpdump_enabled: | |
- self.pcap_file = os.path.join(task['dir'], task['prefix']) + '.cap' | |
- interface = 'any' if self.job['interface'] is None else self.job['interface'] | |
- if platform.system() == 'Windows': | |
- tcpdump = os.path.join(self.support_path, 'windows', 'WinDump.exe') | |
- if interface == 'any': | |
- args = [tcpdump, '-p', '-s', '0', '-w', self.pcap_file] | |
+ self.pcap_file = os.path.join(task["dir"], task["prefix"]) + ".cap" | |
+ interface = ( | |
+ "any" if self.job["interface"] is None else self.job["interface"] | |
+ ) | |
+ if platform.system() == "Windows": | |
+ tcpdump = os.path.join(self.support_path, "windows", "WinDump.exe") | |
+ if interface == "any": | |
+ args = [tcpdump, "-p", "-s", "0", "-w", self.pcap_file] | |
else: | |
- args = [tcpdump, '-p', '-i', interface, '-s', '0', | |
- '-w', self.pcap_file] | |
- logging.debug(' '.join(args)) | |
- self.tcpdump = subprocess.Popen(args, | |
- creationflags=subprocess.CREATE_NEW_PROCESS_GROUP) | |
+ args = [ | |
+ tcpdump, | |
+ "-p", | |
+ "-i", | |
+ interface, | |
+ "-s", | |
+ "0", | |
+ "-w", | |
+ self.pcap_file, | |
+ ] | |
+ logging.debug(" ".join(args)) | |
+ self.tcpdump = subprocess.Popen( | |
+ args, creationflags=subprocess.CREATE_NEW_PROCESS_GROUP | |
+ ) | |
else: | |
- args = ['sudo', 'tcpdump', '-p', '-i', interface, '-s', '0', | |
- '-w', self.pcap_file] | |
- logging.debug(' '.join(args)) | |
+ args = [ | |
+ "sudo", | |
+ "tcpdump", | |
+ "-p", | |
+ "-i", | |
+ interface, | |
+ "-s", | |
+ "0", | |
+ "-w", | |
+ self.pcap_file, | |
+ ] | |
+ logging.debug(" ".join(args)) | |
self.tcpdump = subprocess.Popen(args) | |
# Wait for the capture file to start growing | |
end_time = monotonic.monotonic() + 5 | |
started = False | |
while not started and monotonic.monotonic() < end_time: | |
if os.path.isfile(self.pcap_file): | |
started = True | |
time.sleep(0.1) | |
# Start video capture | |
- if self.job['capture_display'] is not None and not self.job['disable_video']: | |
- if task['navigated']: | |
+ if ( | |
+ self.job["capture_display"] is not None | |
+ and not self.job["disable_video"] | |
+ ): | |
+ if task["navigated"]: | |
self.execute_js(SET_ORANGE) | |
time.sleep(1) | |
- task['video_file'] = os.path.join(task['dir'], task['prefix']) + '_video.mp4' | |
- if platform.system() == 'Windows': | |
+ task["video_file"] = ( | |
+ os.path.join(task["dir"], task["prefix"]) + "_video.mp4" | |
+ ) | |
+ if platform.system() == "Windows": | |
from win32api import GetSystemMetrics | |
+ | |
self.screen_width = GetSystemMetrics(0) | |
self.screen_height = GetSystemMetrics(1) | |
- elif platform.system() == 'Darwin': | |
+ elif platform.system() == "Darwin": | |
try: | |
from AppKit import NSScreen | |
- self.screen_width = int(NSScreen.screens()[0].frame().size.width) | |
- self.screen_height = int(NSScreen.screens()[0].frame().size.height) | |
+ | |
+ self.screen_width = int( | |
+ NSScreen.screens()[0].frame().size.width | |
+ ) | |
+ self.screen_height = int( | |
+ NSScreen.screens()[0].frame().size.height | |
+ ) | |
except Exception: | |
pass | |
- task['width'] = min(task['width'], self.screen_width) | |
- task['height'] = min(task['height'], self.screen_height) | |
- if platform.system() == 'Darwin': | |
- width = int(math.ceil(task['width'] * self.device_pixel_ratio)) | |
- height = int(math.ceil(task['height'] * self.device_pixel_ratio)) | |
- args = ['ffmpeg', '-f', 'avfoundation', | |
- '-i', str(self.job['capture_display']), | |
- '-r', str(self.job['fps']), | |
- '-filter:v', | |
- 'crop={0:d}:{1:d}:0:0'.format(width, height), | |
- '-codec:v', 'libx264rgb', '-crf', '0', '-preset', 'ultrafast', | |
- task['video_file']] | |
+ task["width"] = min(task["width"], self.screen_width) | |
+ task["height"] = min(task["height"], self.screen_height) | |
+ if platform.system() == "Darwin": | |
+ width = int(math.ceil(task["width"] * self.device_pixel_ratio)) | |
+ height = int(math.ceil(task["height"] * self.device_pixel_ratio)) | |
+ args = [ | |
+ "ffmpeg", | |
+ "-f", | |
+ "avfoundation", | |
+ "-i", | |
+ str(self.job["capture_display"]), | |
+ "-r", | |
+ str(self.job["fps"]), | |
+ "-filter:v", | |
+ "crop={0:d}:{1:d}:0:0".format(width, height), | |
+ "-codec:v", | |
+ "libx264rgb", | |
+ "-crf", | |
+ "0", | |
+ "-preset", | |
+ "ultrafast", | |
+ task["video_file"], | |
+ ] | |
else: | |
- grab = 'gdigrab' if platform.system() == 'Windows' else 'x11grab' | |
- args = ['ffmpeg', '-f', grab, '-video_size', | |
- '{0:d}x{1:d}'.format(task['width'], task['height']), | |
- '-framerate', str(self.job['fps']), | |
- '-draw_mouse', '0', '-i', str(self.job['capture_display']), | |
- '-codec:v', 'libx264rgb', '-crf', '0', '-preset', 'ultrafast', | |
- task['video_file']] | |
- if platform.system() in ['Linux', 'Darwin']: | |
- args.insert(0, 'nice') | |
- args.insert(1, '-n') | |
- args.insert(2, '10') | |
- logging.debug(' '.join(args)) | |
+ grab = "gdigrab" if platform.system() == "Windows" else "x11grab" | |
+ args = [ | |
+ "ffmpeg", | |
+ "-f", | |
+ grab, | |
+ "-video_size", | |
+ "{0:d}x{1:d}".format(task["width"], task["height"]), | |
+ "-framerate", | |
+ str(self.job["fps"]), | |
+ "-draw_mouse", | |
+ "0", | |
+ "-i", | |
+ str(self.job["capture_display"]), | |
+ "-codec:v", | |
+ "libx264rgb", | |
+ "-crf", | |
+ "0", | |
+ "-preset", | |
+ "ultrafast", | |
+ task["video_file"], | |
+ ] | |
+ if platform.system() in ["Linux", "Darwin"]: | |
+ args.insert(0, "nice") | |
+ args.insert(1, "-n") | |
+ args.insert(2, "10") | |
+ logging.debug(" ".join(args)) | |
try: | |
- if platform.system() == 'Windows': | |
- self.ffmpeg = subprocess.Popen(args, | |
- creationflags=subprocess.CREATE_NEW_PROCESS_GROUP) | |
+ if platform.system() == "Windows": | |
+ self.ffmpeg = subprocess.Popen( | |
+ args, creationflags=subprocess.CREATE_NEW_PROCESS_GROUP | |
+ ) | |
else: | |
self.ffmpeg = subprocess.Popen(args) | |
# Wait up to 5 seconds for something to be captured | |
end_time = monotonic.monotonic() + 5 | |
started = False | |
initial_size = None | |
while not started and monotonic.monotonic() < end_time: | |
- if os.path.isfile(task['video_file']): | |
- video_size = os.path.getsize(task['video_file']) | |
+ if os.path.isfile(task["video_file"]): | |
+ video_size = os.path.getsize(task["video_file"]) | |
if initial_size == None: | |
initial_size = video_size | |
logging.debug("Video file size: %d", video_size) | |
if video_size > initial_size or video_size > 10000: | |
started = True | |
@@ -486,121 +594,159 @@ | |
self.start_cpu_throttling() | |
def on_stop_capture(self, task): | |
"""Do any quick work to stop things that are capturing data""" | |
if self.tcpdump is not None: | |
- logging.debug('Stopping tcpdump') | |
+ logging.debug("Stopping tcpdump") | |
from .os_util import kill_all | |
- if platform.system() == 'Windows': | |
+ | |
+ if platform.system() == "Windows": | |
os.kill(self.tcpdump.pid, signal.CTRL_BREAK_EVENT) | |
- kill_all('WinDump', False) | |
+ kill_all("WinDump", False) | |
else: | |
- subprocess.call(['sudo', 'killall', 'tcpdump']) | |
- kill_all('tcpdump', False) | |
+ subprocess.call(["sudo", "killall", "tcpdump"]) | |
+ kill_all("tcpdump", False) | |
if self.ffmpeg is not None: | |
- logging.debug('Stopping video capture') | |
+ logging.debug("Stopping video capture") | |
self.video_capture_running = False | |
- if platform.system() == 'Windows': | |
+ if platform.system() == "Windows": | |
os.kill(self.ffmpeg.pid, signal.CTRL_BREAK_EVENT) | |
else: | |
self.ffmpeg.terminate() | |
def on_stop_recording(self, task): | |
"""Notification that we are done with recording""" | |
self.stop_cpu_throttling() | |
import psutil | |
+ | |
if self.cpu_start is not None: | |
cpu_end = psutil.cpu_times() | |
- cpu_busy = (cpu_end.user - self.cpu_start.user) + \ | |
- (cpu_end.system - self.cpu_start.system) | |
+ cpu_busy = (cpu_end.user - self.cpu_start.user) + ( | |
+ cpu_end.system - self.cpu_start.system | |
+ ) | |
cpu_total = cpu_busy + (cpu_end.idle - self.cpu_start.idle) | |
cpu_pct = cpu_busy * 100.0 / cpu_total | |
- task['page_data']['fullyLoadedCPUms'] = int(cpu_busy * 1000.0) | |
- task['page_data']['fullyLoadedCPUpct'] = cpu_pct | |
+ task["page_data"]["fullyLoadedCPUms"] = int(cpu_busy * 1000.0) | |
+ task["page_data"]["fullyLoadedCPUpct"] = cpu_pct | |
self.cpu_start = None | |
self.recording = False | |
if self.thread is not None: | |
self.thread.join(10) | |
self.thread = None | |
# record the CPU/Bandwidth/memory info | |
- if self.usage_queue is not None and not self.usage_queue.empty() and task is not None: | |
- file_path = os.path.join(task['dir'], task['prefix']) + '_progress.csv.gz' | |
- gzfile = gzip.open(file_path, 'wb', 7) | |
+ if ( | |
+ self.usage_queue is not None | |
+ and not self.usage_queue.empty() | |
+ and task is not None | |
+ ): | |
+ file_path = os.path.join(task["dir"], task["prefix"]) + "_progress.csv.gz" | |
+ gzfile = gzip.open(file_path, "wb", 7) | |
if gzfile: | |
- gzfile.write("Offset Time (ms),Bandwidth In (bps),CPU Utilization (%),Memory\n") | |
+ gzfile.write( | |
+ "Offset Time (ms),Bandwidth In (bps),CPU Utilization (%),Memory\n" | |
+ ) | |
while not self.usage_queue.empty(): | |
snapshot = self.usage_queue.get_nowait() | |
- gzfile.write('{0:d},{1:d},{2:0.2f},-1\n'.format( | |
- snapshot['time'], snapshot['bw'], snapshot['cpu'])) | |
+ gzfile.write( | |
+ "{0:d},{1:d},{2:0.2f},-1\n".format( | |
+ snapshot["time"], snapshot["bw"], snapshot["cpu"] | |
+ ) | |
+ ) | |
gzfile.close() | |
if self.tcpdump is not None: | |
- logging.debug('Waiting for tcpdump to stop') | |
+ logging.debug("Waiting for tcpdump to stop") | |
from .os_util import wait_for_all | |
- if platform.system() == 'Windows': | |
- wait_for_all('WinDump') | |
+ | |
+ if platform.system() == "Windows": | |
+ wait_for_all("WinDump") | |
else: | |
- wait_for_all('tcpdump') | |
+ wait_for_all("tcpdump") | |
self.tcpdump = None | |
if self.ffmpeg is not None: | |
- logging.debug('Waiting for video capture to finish') | |
+ logging.debug("Waiting for video capture to finish") | |
self.ffmpeg.communicate() | |
self.ffmpeg = None | |
- if platform.system() == 'Windows': | |
+ if platform.system() == "Windows": | |
from .os_util import kill_all | |
- kill_all('ffmpeg.exe', True) | |
+ | |
+ kill_all("ffmpeg.exe", True) | |
else: | |
- subprocess.call(['killall', '-9', 'ffmpeg']) | |
+ subprocess.call(["killall", "-9", "ffmpeg"]) | |
# kick off the video processing (async) | |
- if 'video_file' in task and os.path.isfile(task['video_file']): | |
- video_path = os.path.join(task['dir'], task['video_subdirectory']) | |
- support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
- if task['current_step'] == 1: | |
- filename = '{0:d}.{1:d}.histograms.json.gz'.format(task['run'], task['cached']) | |
+ if "video_file" in task and os.path.isfile(task["video_file"]): | |
+ video_path = os.path.join(task["dir"], task["video_subdirectory"]) | |
+ support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
+ if task["current_step"] == 1: | |
+ filename = "{0:d}.{1:d}.histograms.json.gz".format( | |
+ task["run"], task["cached"] | |
+ ) | |
else: | |
- filename = '{0:d}.{1:d}.{2:d}.histograms.json.gz'.format(task['run'], | |
- task['cached'], | |
- task['current_step']) | |
- histograms = os.path.join(task['dir'], filename) | |
- progress_file = os.path.join(task['dir'], task['prefix']) + '_visual_progress.json.gz' | |
+ filename = "{0:d}.{1:d}.{2:d}.histograms.json.gz".format( | |
+ task["run"], task["cached"], task["current_step"] | |
+ ) | |
+ histograms = os.path.join(task["dir"], filename) | |
+ progress_file = ( | |
+ os.path.join(task["dir"], task["prefix"]) + "_visual_progress.json.gz" | |
+ ) | |
visualmetrics = os.path.join(support_path, "visualmetrics.py") | |
- args = ['python', visualmetrics, '-i', task['video_file'], | |
- '-d', video_path, '--force', '--quality', | |
- '{0:d}'.format(self.job['imageQuality']), | |
- '--viewport', '--orange', '--maxframes', '50', '--histogram', histograms, | |
- '--progress', progress_file] | |
- if 'debug' in self.job and self.job['debug']: | |
- args.append('-vvvv') | |
- if not task['navigated']: | |
- args.append('--forceblank') | |
- if 'heroElementTimes' in self.job and self.job['heroElementTimes']: | |
- hero_elements_file = os.path.join(task['dir'], task['prefix']) + '_hero_elements.json.gz' | |
- args.extend(['--herodata', hero_elements_file]) | |
- if 'renderVideo' in self.job and self.job['renderVideo']: | |
- video_out = os.path.join(task['dir'], task['prefix']) + '_rendered_video.mp4' | |
- args.extend(['--render', video_out]) | |
- if 'fullSizeVideo' in self.job and self.job['fullSizeVideo']: | |
- args.append('--full') | |
- if 'thumbsize' in self.job: | |
+ args = [ | |
+ "python", | |
+ visualmetrics, | |
+ "-i", | |
+ task["video_file"], | |
+ "-d", | |
+ video_path, | |
+ "--force", | |
+ "--quality", | |
+ "{0:d}".format(self.job["imageQuality"]), | |
+ "--viewport", | |
+ "--orange", | |
+ "--maxframes", | |
+ "50", | |
+ "--histogram", | |
+ histograms, | |
+ "--progress", | |
+ progress_file, | |
+ ] | |
+ if "debug" in self.job and self.job["debug"]: | |
+ args.append("-vvvv") | |
+ if not task["navigated"]: | |
+ args.append("--forceblank") | |
+ if "heroElementTimes" in self.job and self.job["heroElementTimes"]: | |
+ hero_elements_file = ( | |
+ os.path.join(task["dir"], task["prefix"]) + "_hero_elements.json.gz" | |
+ ) | |
+ args.extend(["--herodata", hero_elements_file]) | |
+ if "renderVideo" in self.job and self.job["renderVideo"]: | |
+ video_out = ( | |
+ os.path.join(task["dir"], task["prefix"]) + "_rendered_video.mp4" | |
+ ) | |
+ args.extend(["--render", video_out]) | |
+ if "fullSizeVideo" in self.job and self.job["fullSizeVideo"]: | |
+ args.append("--full") | |
+ if "thumbsize" in self.job: | |
try: | |
- thumbsize = int(self.job['thumbsize']) | |
+ thumbsize = int(self.job["thumbsize"]) | |
if thumbsize > 0 and thumbsize <= 2000: | |
- args.extend(['--thumbsize', str(thumbsize)]) | |
+ args.extend(["--thumbsize", str(thumbsize)]) | |
except Exception: | |
pass | |
- logging.debug(' '.join(args)) | |
+ logging.debug(" ".join(args)) | |
self.video_processing = subprocess.Popen(args) | |
- self.job['shaper'].reset() | |
+ self.job["shaper"].reset() | |
def on_start_processing(self, task): | |
"""Start any processing of the captured data""" | |
# Process the tcpdump | |
if self.pcap_file is not None: | |
- logging.debug('Compressing pcap') | |
+ logging.debug("Compressing pcap") | |
if os.path.isfile(self.pcap_file): | |
- pcap_out = self.pcap_file + '.gz' | |
- with open(self.pcap_file, 'rb') as f_in: | |
- with gzip.open(pcap_out, 'wb', 7) as f_out: | |
+ pcap_out = self.pcap_file + ".gz" | |
+ with open(self.pcap_file, "rb") as f_in: | |
+ with gzip.open(pcap_out, "wb", 7) as f_out: | |
shutil.copyfileobj(f_in, f_out) | |
if os.path.isfile(pcap_out): | |
self.pcap_thread = threading.Thread(target=self.process_pcap) | |
self.pcap_thread.daemon = True | |
self.pcap_thread.start() | |
@@ -610,169 +756,198 @@ | |
pass | |
def wait_for_processing(self, task): | |
"""Wait for any background processing threads to finish""" | |
if self.video_processing is not None: | |
- logging.debug('Waiting for video processing to finish') | |
+ logging.debug("Waiting for video processing to finish") | |
self.video_processing.communicate() | |
self.video_processing = None | |
- logging.debug('Video processing complete') | |
- if not self.job['keepvideo']: | |
+ logging.debug("Video processing complete") | |
+ if not self.job["keepvideo"]: | |
try: | |
- os.remove(task['video_file']) | |
+ os.remove(task["video_file"]) | |
except Exception: | |
pass | |
if self.pcap_thread is not None: | |
- logging.debug('Waiting for pcap processing to finish') | |
+ logging.debug("Waiting for pcap processing to finish") | |
self.pcap_thread.join() | |
self.pcap_thread = None | |
self.pcap_file = None | |
def step_complete(self, task): | |
"""All of the processing for the current test step is complete""" | |
# Write out the accumulated page_data | |
- if task['log_data'] and task['page_data']: | |
- if 'browser' in self.job: | |
- task['page_data']['browser_name'] = self.job['browser'] | |
- if 'step_name' in task: | |
- task['page_data']['eventName'] = task['step_name'] | |
- if 'run_start_time' in task: | |
- task['page_data']['test_run_time_ms'] = \ | |
- int(round((monotonic.monotonic() - task['run_start_time']) * 1000.0)) | |
- path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') | |
- json_page_data = json.dumps(task['page_data']) | |
- logging.debug('Page Data: %s', json_page_data) | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ if task["log_data"] and task["page_data"]: | |
+ if "browser" in self.job: | |
+ task["page_data"]["browser_name"] = self.job["browser"] | |
+ if "step_name" in task: | |
+ task["page_data"]["eventName"] = task["step_name"] | |
+ if "run_start_time" in task: | |
+ task["page_data"]["test_run_time_ms"] = int( | |
+ round((monotonic.monotonic() - task["run_start_time"]) * 1000.0) | |
+ ) | |
+ path = os.path.join(task["dir"], task["prefix"] + "_page_data.json.gz") | |
+ json_page_data = json.dumps(task["page_data"]) | |
+ logging.debug("Page Data: %s", json_page_data) | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json_page_data) | |
def process_pcap(self): | |
"""Process the pcap in a background thread""" | |
- pcap_file = self.pcap_file + '.gz' | |
+ pcap_file = self.pcap_file + ".gz" | |
if os.path.isfile(pcap_file): | |
- path_base = os.path.join(self.task['dir'], self.task['prefix']) | |
- slices_file = path_base + '_pcap_slices.json.gz' | |
- pcap_parser = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', "pcap-parser.py") | |
- cmd = ['python', pcap_parser, '--json', '-i', pcap_file, '-d', slices_file] | |
+ path_base = os.path.join(self.task["dir"], self.task["prefix"]) | |
+ slices_file = path_base + "_pcap_slices.json.gz" | |
+ pcap_parser = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "pcap-parser.py" | |
+ ) | |
+ cmd = ["python", pcap_parser, "--json", "-i", pcap_file, "-d", slices_file] | |
logging.debug(cmd) | |
try: | |
stdout = subprocess.check_output(cmd) | |
if stdout is not None: | |
result = json.loads(stdout) | |
if result: | |
- if 'in' in result: | |
- self.task['page_data']['pcapBytesIn'] = result['in'] | |
- if 'out' in result: | |
- self.task['page_data']['pcapBytesOut'] = result['out'] | |
- if 'in_dup' in result: | |
- self.task['page_data']['pcapBytesInDup'] = result['in_dup'] | |
+ if "in" in result: | |
+ self.task["page_data"]["pcapBytesIn"] = result["in"] | |
+ if "out" in result: | |
+ self.task["page_data"]["pcapBytesOut"] = result["out"] | |
+ if "in_dup" in result: | |
+ self.task["page_data"]["pcapBytesInDup"] = result["in_dup"] | |
except Exception: | |
pass | |
def get_net_bytes(self): | |
"""Get the bytes received, ignoring the loopback interface""" | |
import psutil | |
+ | |
bytes_in = 0 | |
net = psutil.net_io_counters(True) | |
for interface in net: | |
if self.interfaces is not None: | |
if interface in self.interfaces: | |
bytes_in += net[interface].bytes_recv | |
- elif interface != 'lo' and interface[:3] != 'ifb': | |
+ elif interface != "lo" and interface[:3] != "ifb": | |
bytes_in += net[interface].bytes_recv | |
return bytes_in | |
def background_thread(self): | |
"""Background thread for monitoring CPU and bandwidth usage""" | |
import psutil | |
+ | |
last_time = start_time = monotonic.monotonic() | |
last_bytes = self.get_net_bytes() | |
- snapshot = {'time': 0, 'cpu': 0.0, 'bw': 0} | |
+ snapshot = {"time": 0, "cpu": 0.0, "bw": 0} | |
self.usage_queue.put(snapshot) | |
while self.recording: | |
- snapshot = {'bw': 0} | |
- snapshot['cpu'] = psutil.cpu_percent(interval=0.1) | |
+ snapshot = {"bw": 0} | |
+ snapshot["cpu"] = psutil.cpu_percent(interval=0.1) | |
now = monotonic.monotonic() | |
- snapshot['time'] = int((now - start_time) * 1000) | |
+ snapshot["time"] = int((now - start_time) * 1000) | |
# calculate the bandwidth over the last interval in Kbps | |
bytes_in = self.get_net_bytes() | |
if now > last_time: | |
- snapshot['bw'] = int((bytes_in - last_bytes) * 8.0 / (now - last_time)) | |
+ snapshot["bw"] = int((bytes_in - last_bytes) * 8.0 / (now - last_time)) | |
last_time = now | |
last_bytes = bytes_in | |
self.usage_queue.put(snapshot) | |
# if we are capturing video, make sure it doesn't get too big | |
- if self.ffmpeg is not None and \ | |
- self.video_capture_running and \ | |
- 'video_file' in self.task and \ | |
- os.path.isfile(self.task['video_file']): | |
- video_size = os.path.getsize(self.task['video_file']) | |
+ if ( | |
+ self.ffmpeg is not None | |
+ and self.video_capture_running | |
+ and "video_file" in self.task | |
+ and os.path.isfile(self.task["video_file"]) | |
+ ): | |
+ video_size = os.path.getsize(self.task["video_file"]) | |
if video_size > 50000000: | |
- logging.debug('Stopping video capture - File is too big: %d', video_size) | |
+ logging.debug( | |
+ "Stopping video capture - File is too big: %d", video_size | |
+ ) | |
self.video_capture_running = False | |
- if platform.system() == 'Windows': | |
+ if platform.system() == "Windows": | |
os.kill(self.ffmpeg.pid, signal.CTRL_BREAK_EVENT) | |
else: | |
self.ffmpeg.terminate() | |
def enable_cpu_throttling(self, command_line): | |
"""Prepare the CPU throttling if necessary""" | |
- if self.options.throttle and 'throttle_cpu' in self.job: | |
- logging.debug('CPU Throttle target: %0.3fx', self.job['throttle_cpu']) | |
- if self.options.throttle and 'throttle_cpu' in self.job and \ | |
- self.job['throttle_cpu'] > 1: | |
+ if self.options.throttle and "throttle_cpu" in self.job: | |
+ logging.debug("CPU Throttle target: %0.3fx", self.job["throttle_cpu"]) | |
+ if ( | |
+ self.options.throttle | |
+ and "throttle_cpu" in self.job | |
+ and self.job["throttle_cpu"] > 1 | |
+ ): | |
try: | |
import getpass | |
- uid = '{0}:{0}'.format(getpass.getuser()) | |
- cmd = ['sudo', 'cgcreate', '-a', uid, '-t', uid, '-g', 'cpu,cpuset:wptagent'] | |
- logging.debug(' '.join(cmd)) | |
+ | |
+ uid = "{0}:{0}".format(getpass.getuser()) | |
+ cmd = [ | |
+ "sudo", | |
+ "cgcreate", | |
+ "-a", | |
+ uid, | |
+ "-t", | |
+ uid, | |
+ "-g", | |
+ "cpu,cpuset:wptagent", | |
+ ] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
- cmd = ['sudo', 'cgset', '-r', 'cpuset.cpus="0"', 'wptagent'] | |
- logging.debug(' '.join(cmd)) | |
+ cmd = ["sudo", "cgset", "-r", 'cpuset.cpus="0"', "wptagent"] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
- cmd = ['sudo', 'cgset', '-r', 'cpu.cfs_period_us=1000', 'wptagent'] | |
- logging.debug(' '.join(cmd)) | |
+ cmd = ["sudo", "cgset", "-r", "cpu.cfs_period_us=1000", "wptagent"] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
- cmd = ['sudo', 'cgset', '-r', 'cpu.cfs_quota_us=1000', 'wptagent'] | |
- logging.debug(' '.join(cmd)) | |
+ cmd = ["sudo", "cgset", "-r", "cpu.cfs_quota_us=1000", "wptagent"] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
- command_line = 'cgexec -g cpu:wptagent ' + command_line | |
+ command_line = "cgexec -g cpu:wptagent " + command_line | |
except Exception as err: | |
logging.critical("Exception enabling throttling: %s", err.__str__()) | |
self.throttling_cpu = True | |
return command_line | |
def disable_cpu_throttling(self): | |
"""Remove the CPU throttling if necessary""" | |
if self.throttling_cpu: | |
try: | |
- cmd = ['sudo', 'cgdelete', '-r', 'cpu,cpuset:wptagent'] | |
- logging.debug(' '.join(cmd)) | |
+ cmd = ["sudo", "cgdelete", "-r", "cpu,cpuset:wptagent"] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
except Exception: | |
pass | |
def start_cpu_throttling(self): | |
"""Start the CPU throttling if necessary""" | |
- if self.options.throttle and 'throttle_cpu' in self.job: | |
- self.task['page_data']['throttle_cpu_requested'] = self.job['throttle_cpu_requested'] | |
+ if self.options.throttle and "throttle_cpu" in self.job: | |
+ self.task["page_data"]["throttle_cpu_requested"] = self.job[ | |
+ "throttle_cpu_requested" | |
+ ] | |
if self.throttling_cpu: | |
- self.task['page_data']['throttle_cpu'] = self.job['throttle_cpu'] | |
+ self.task["page_data"]["throttle_cpu"] = self.job["throttle_cpu"] | |
try: | |
# Leave the quota at 1000 and vary the period to get to the correct multiplier | |
- period = int(round(1000.0 * self.job['throttle_cpu'])) | |
- cmd = ['sudo', 'cgset', '-r', 'cpu.cfs_period_us={0:d}'.format(period), 'wptagent'] | |
- logging.debug(' '.join(cmd)) | |
+ period = int(round(1000.0 * self.job["throttle_cpu"])) | |
+ cmd = [ | |
+ "sudo", | |
+ "cgset", | |
+ "-r", | |
+ "cpu.cfs_period_us={0:d}".format(period), | |
+ "wptagent", | |
+ ] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
except Exception: | |
pass | |
def stop_cpu_throttling(self): | |
"""Start the CPU throttling if necessary""" | |
if self.throttling_cpu: | |
try: | |
- cmd = ['sudo', 'cgset', '-r', 'cpu.cfs_period_us=1000', 'wptagent'] | |
- logging.debug(' '.join(cmd)) | |
+ cmd = ["sudo", "cgset", "-r", "cpu.cfs_period_us=1000", "wptagent"] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
except Exception: | |
pass | |
--- internal/adb.py 2018-11-07 20:41:27.841708 +0000 | |
+++ internal/adb.py 2019-02-06 17:08:27.279840 +0000 | |
@@ -11,12 +11,14 @@ | |
import time | |
import monotonic | |
# cSpell:ignore vpndialogs, sysctl, iptables, ifconfig, dstaddr, clientidbase, nsecs | |
+ | |
class Adb(object): | |
"""ADB command-line interface""" | |
+ | |
def __init__(self, options, cache_dir): | |
self.options = options | |
self.device = options.device | |
self.rndis = options.rndis | |
self.ping_address = None | |
@@ -36,37 +38,41 @@ | |
self.last_network_ok = monotonic.monotonic() | |
self.needs_exit = False | |
self.rebooted = False | |
self.vpn_forwarder = None | |
self.known_apps = { | |
- 'com.motorola.ccc.ota': {}, | |
- 'com.google.android.apps.docs': {}, | |
- 'com.samsung.android.MtpApplication': {} | |
+ "com.motorola.ccc.ota": {}, | |
+ "com.google.android.apps.docs": {}, | |
+ "com.samsung.android.MtpApplication": {}, | |
} | |
self.gnirehtet = None | |
self.gnirehtet_exe = None | |
if options.gnirehtet: | |
if platform.system() == "Windows": | |
- if platform.machine().endswith('64'): | |
- self.gnirehtet_exe = os.path.join(self.root_path, 'gnirehtet', | |
- 'win64', 'gnirehtet.exe') | |
+ if platform.machine().endswith("64"): | |
+ self.gnirehtet_exe = os.path.join( | |
+ self.root_path, "gnirehtet", "win64", "gnirehtet.exe" | |
+ ) | |
elif platform.system() == "Linux": | |
- if os.uname()[4].startswith('arm'): | |
- self.gnirehtet_exe = os.path.join(self.root_path, 'gnirehtet', | |
- 'arm', 'gnirehtet') | |
- elif platform.architecture()[0] == '64bit': | |
- self.gnirehtet_exe = os.path.join(self.root_path, 'gnirehtet', | |
- 'linux64', 'gnirehtet') | |
+ if os.uname()[4].startswith("arm"): | |
+ self.gnirehtet_exe = os.path.join( | |
+ self.root_path, "gnirehtet", "arm", "gnirehtet" | |
+ ) | |
+ elif platform.architecture()[0] == "64bit": | |
+ self.gnirehtet_exe = os.path.join( | |
+ self.root_path, "gnirehtet", "linux64", "gnirehtet" | |
+ ) | |
if self.gnirehtet_exe is not None: | |
from .os_util import kill_all | |
+ | |
kill_all(os.path.basename(self.gnirehtet_exe), True) | |
- self.exe = 'adb' | |
+ self.exe = "adb" | |
def run(self, cmd, timeout_sec=60, silent=False): | |
"""Run a shell command with a time limit and get the output""" | |
if not silent: | |
- logging.debug(' '.join(cmd)) | |
+ logging.debug(" ".join(cmd)) | |
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | |
return self.wait_for_process(proc, timeout_sec, silent) | |
def wait_for_process(self, proc, timeout_sec=10, silent=False): | |
"""Wait for the given process to exit gracefully and return the result""" | |
@@ -77,227 +83,253 @@ | |
timer.start() | |
stdout, _ = proc.communicate() | |
if not silent and stdout is not None and len(stdout): | |
logging.debug(stdout[:100]) | |
except Exception: | |
- logging.debug('Error waiting for process to exit') | |
+ logging.debug("Error waiting for process to exit") | |
finally: | |
if timer is not None: | |
timer.cancel() | |
return stdout | |
- | |
def build_adb_command(self, args): | |
"""Build an adb command with the (optional) device ID""" | |
cmd = [self.exe] | |
if self.device is not None: | |
- cmd.extend(['-s', self.device]) | |
+ cmd.extend(["-s", self.device]) | |
cmd.extend(args) | |
return cmd | |
def shell(self, args, timeout_sec=60, silent=False): | |
"""Run an adb shell command""" | |
- cmd = self.build_adb_command(['shell']) | |
+ cmd = self.build_adb_command(["shell"]) | |
cmd.extend(args) | |
return self.run(cmd, timeout_sec, silent) | |
# pylint: disable=C0103 | |
def su(self, command, timeout_sec=60, silent=False): | |
"""Run a command as su""" | |
- cmd = ['su', '-c', command] | |
+ cmd = ["su", "-c", command] | |
return self.shell(cmd, timeout_sec, silent) | |
+ | |
# pylint: enable=C0103 | |
def adb(self, args, silent=False): | |
"""Run an arbitrary adb command""" | |
cmd = self.build_adb_command(args) | |
if not silent: | |
- logging.debug(' '.join(cmd)) | |
+ logging.debug(" ".join(cmd)) | |
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | |
self.wait_for_process(proc, 120, silent) | |
return bool(proc.returncode is not None and proc.returncode == 0) | |
# pylint: disable=E1101 | |
def start(self): | |
""" Do some startup check to make sure adb is installed""" | |
import psutil | |
+ | |
ret = False | |
- out = self.run(self.build_adb_command(['devices'])) | |
+ out = self.run(self.build_adb_command(["devices"])) | |
if out is not None: | |
ret = True | |
# Set the CPU affinity for adb which helps avoid hangs | |
for proc in psutil.process_iter(): | |
- if proc.name() == "adb.exe" or proc.name() == "adb" or proc.name() == "adb-arm": | |
+ if ( | |
+ proc.name() == "adb.exe" | |
+ or proc.name() == "adb" | |
+ or proc.name() == "adb-arm" | |
+ ): | |
proc.cpu_affinity([0]) | |
# install the tun0 device if necessary | |
if self.options.vpntether and platform.system() == "Linux": | |
- self.sudo(['ip', 'tuntap', 'add', 'dev', 'tun0', 'mode', 'tun']) | |
+ self.sudo(["ip", "tuntap", "add", "dev", "tun0", "mode", "tun"]) | |
# Start the simple-rt process if needed | |
self.simplert_path = None | |
- if self.options.simplert is not None and platform.system() == 'Linux': | |
+ if self.options.simplert is not None and platform.system() == "Linux": | |
running = False | |
- stdout = subprocess.check_output(['ps', 'ax']) | |
- if stdout.find('simple-rt ') > -1: | |
+ stdout = subprocess.check_output(["ps", "ax"]) | |
+ if stdout.find("simple-rt ") > -1: | |
running = True | |
- logging.debug('simple-rt is already running') | |
+ logging.debug("simple-rt is already running") | |
if not running: | |
- if os.uname()[4].startswith('arm'): | |
- self.simplert_path = os.path.join(self.root_path, 'simple-rt', 'arm') | |
- elif platform.architecture()[0] == '64bit': | |
- self.simplert_path = os.path.join(self.root_path, 'simple-rt', 'linux64') | |
+ if os.uname()[4].startswith("arm"): | |
+ self.simplert_path = os.path.join( | |
+ self.root_path, "simple-rt", "arm" | |
+ ) | |
+ elif platform.architecture()[0] == "64bit": | |
+ self.simplert_path = os.path.join( | |
+ self.root_path, "simple-rt", "linux64" | |
+ ) | |
if self.simplert_path is not None: | |
- self.shell(['am', 'force-stop', 'com.viper.simplert']) | |
- logging.debug('Starting simple-rt bridge process') | |
- interface, dns = self.options.simplert.split(',', 1) | |
- exe = os.path.join(self.simplert_path, 'simple-rt') | |
- command = ['sudo', exe, '-i', interface] | |
+ self.shell(["am", "force-stop", "com.viper.simplert"]) | |
+ logging.debug("Starting simple-rt bridge process") | |
+ interface, dns = self.options.simplert.split(",", 1) | |
+ exe = os.path.join(self.simplert_path, "simple-rt") | |
+ command = ["sudo", exe, "-i", interface] | |
if dns is not None and len(dns): | |
- command.extend(['-n', dns]) | |
- self.simplert = subprocess.Popen(' '.join(command), shell=True, | |
- cwd=self.simplert_path) | |
+ command.extend(["-n", dns]) | |
+ self.simplert = subprocess.Popen( | |
+ " ".join(command), shell=True, cwd=self.simplert_path | |
+ ) | |
return ret | |
+ | |
# pylint: enable=E1101 | |
def stop(self): | |
"""Shut down anything necessary""" | |
if self.simplert is not None: | |
- self.shell(['am', 'force-stop', 'com.viper.simplert']) | |
- logging.debug('Stopping simple-rt bridge process') | |
- subprocess.call(['sudo', 'killall', 'simple-rt']) | |
+ self.shell(["am", "force-stop", "com.viper.simplert"]) | |
+ logging.debug("Stopping simple-rt bridge process") | |
+ subprocess.call(["sudo", "killall", "simple-rt"]) | |
self.simplert = None | |
if self.options.vpntether and platform.system() == "Linux": | |
if self.vpn_forwarder is not None: | |
try: | |
self.vpn_forwarder.write("\n") | |
time.sleep(0.5) | |
- subprocess.call(['sudo', 'killall', 'forwarder']) | |
+ subprocess.call(["sudo", "killall", "forwarder"]) | |
self.vpn_forwarder.close() | |
except Exception: | |
pass | |
self.vpn_forwarder = None | |
- self.shell(['am', 'force-stop', 'com.google.android.vpntether']) | |
+ self.shell(["am", "force-stop", "com.google.android.vpntether"]) | |
if self.gnirehtet_exe is not None: | |
try: | |
- subprocess.call([self.gnirehtet_exe, 'stop']) | |
+ subprocess.call([self.gnirehtet_exe, "stop"]) | |
if self.gnirehtet is not None: | |
self.gnirehtet.terminate() | |
self.gnirehtet.communicate() | |
self.gnirehtet = None | |
from .os_util import kill_all | |
+ | |
kill_all(os.path.basename(self.gnirehtet_exe), True) | |
except Exception: | |
pass | |
- | |
- def kill_proc(self, procname, kill_signal='-SIGINT'): | |
+ def kill_proc(self, procname, kill_signal="-SIGINT"): | |
"""Kill all processes with the given name""" | |
- out = self.shell(['ps', '|', 'grep', procname]) | |
+ out = self.shell(["ps", "|", "grep", procname]) | |
if out is not None: | |
for line in out.splitlines(): | |
- match = re.search(r'^\s*[^\s]+\s+(\d+)', line) | |
+ match = re.search(r"^\s*[^\s]+\s+(\d+)", line) | |
if match: | |
pid = match.group(1) | |
- self.shell(['kill', kill_signal, pid]) | |
- | |
- def kill_proc_su(self, procname, kill_signal='-SIGINT'): | |
+ self.shell(["kill", kill_signal, pid]) | |
+ | |
+ def kill_proc_su(self, procname, kill_signal="-SIGINT"): | |
"""Kill all processes with the given name""" | |
- out = self.su('ps') | |
+ out = self.su("ps") | |
if out is not None: | |
for line in out.splitlines(): | |
if line.find(procname) >= 0: | |
- match = re.search(r'^\s*[^\s]+\s+(\d+)', line) | |
+ match = re.search(r"^\s*[^\s]+\s+(\d+)", line) | |
if match: | |
pid = match.group(1) | |
- self.su('kill {0} {1}'.format(kill_signal, pid)) | |
+ self.su("kill {0} {1}".format(kill_signal, pid)) | |
def start_screenrecord(self): | |
"""Start a screenrecord session on the device""" | |
- self.shell(['rm', '/data/local/tmp/wpt_video.mp4']) | |
+ self.shell(["rm", "/data/local/tmp/wpt_video.mp4"]) | |
try: | |
- cmd = self.build_adb_command(['shell', 'screenrecord', '--verbose', | |
- '--bit-rate', '8000000', | |
- '/data/local/tmp/wpt_video.mp4']) | |
+ cmd = self.build_adb_command( | |
+ [ | |
+ "shell", | |
+ "screenrecord", | |
+ "--verbose", | |
+ "--bit-rate", | |
+ "8000000", | |
+ "/data/local/tmp/wpt_video.mp4", | |
+ ] | |
+ ) | |
self.screenrecord = subprocess.Popen(cmd) | |
except Exception: | |
pass | |
def stop_screenrecord(self, local_file): | |
"""Stop a screen record and download the video to local_file""" | |
if self.screenrecord is not None: | |
- logging.debug('Stopping screenrecord') | |
- self.kill_proc('screenrecord') | |
+ logging.debug("Stopping screenrecord") | |
+ self.kill_proc("screenrecord") | |
self.wait_for_process(self.screenrecord) | |
self.screenrecord = None | |
- self.adb(['pull', '/data/local/tmp/wpt_video.mp4', local_file]) | |
- self.shell(['rm', '/data/local/tmp/wpt_video.mp4']) | |
+ self.adb(["pull", "/data/local/tmp/wpt_video.mp4", local_file]) | |
+ self.shell(["rm", "/data/local/tmp/wpt_video.mp4"]) | |
def start_tcpdump(self): | |
"""Start a tcpdump capture""" | |
- tcpdump_binary = '/data/local/tmp/tcpdump474' | |
- capture_file = '/data/local/tmp/tcpdump.cap' | |
- local_binary = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'android', 'tcpdump') | |
- out = self.su('ls {0}'.format(tcpdump_binary)) | |
- if out.find('No such') > -1: | |
- self.adb(['push', local_binary, tcpdump_binary]) | |
- self.su('chown root {0}'.format(tcpdump_binary)) | |
- self.su('chmod 755 {0}'.format(tcpdump_binary)) | |
- cmd = self.build_adb_command(['shell', 'su', '-c', | |
- '{0} -i any -p -s 0 -w {1}'.format(tcpdump_binary, | |
- capture_file)]) | |
+ tcpdump_binary = "/data/local/tmp/tcpdump474" | |
+ capture_file = "/data/local/tmp/tcpdump.cap" | |
+ local_binary = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "android", "tcpdump" | |
+ ) | |
+ out = self.su("ls {0}".format(tcpdump_binary)) | |
+ if out.find("No such") > -1: | |
+ self.adb(["push", local_binary, tcpdump_binary]) | |
+ self.su("chown root {0}".format(tcpdump_binary)) | |
+ self.su("chmod 755 {0}".format(tcpdump_binary)) | |
+ cmd = self.build_adb_command( | |
+ [ | |
+ "shell", | |
+ "su", | |
+ "-c", | |
+ "{0} -i any -p -s 0 -w {1}".format(tcpdump_binary, capture_file), | |
+ ] | |
+ ) | |
try: | |
- logging.debug(' '.join(cmd)) | |
+ logging.debug(" ".join(cmd)) | |
self.tcpdump = subprocess.Popen(cmd) | |
except Exception: | |
pass | |
def stop_tcpdump(self, local_file): | |
"""Stop a tcpdump capture and download to local_file""" | |
if self.tcpdump is not None: | |
- logging.debug('Stopping tcpdump') | |
- capture_file = '/data/local/tmp/tcpdump.cap' | |
- self.kill_proc_su('tcpdump474') | |
+ logging.debug("Stopping tcpdump") | |
+ capture_file = "/data/local/tmp/tcpdump.cap" | |
+ self.kill_proc_su("tcpdump474") | |
self.wait_for_process(self.tcpdump) | |
self.tcpdump = None | |
- self.su('chmod 666 {0}'.format(capture_file)) | |
- self.adb(['pull', capture_file, local_file]) | |
- self.su('rm {0}'.format(capture_file)) | |
+ self.su("chmod 666 {0}".format(capture_file)) | |
+ self.adb(["pull", capture_file, local_file]) | |
+ self.su("rm {0}".format(capture_file)) | |
def get_battery_stats(self): | |
"""Get the temperature andlevel of the battery""" | |
ret = {} | |
- out = self.shell(['dumpsys', 'battery'], silent=True) | |
+ out = self.shell(["dumpsys", "battery"], silent=True) | |
if out is not None: | |
for line in out.splitlines(): | |
- match = re.search(r'^\s*level:\s*(\d+)', line) | |
+ match = re.search(r"^\s*level:\s*(\d+)", line) | |
if match: | |
- ret['level'] = int(match.group(1)) | |
- match = re.search(r'^\s*temperature:\s*(\d+)', line) | |
+ ret["level"] = int(match.group(1)) | |
+ match = re.search(r"^\s*temperature:\s*(\d+)", line) | |
if match: | |
- ret['temp'] = float(match.group(1)) / 10.0 | |
+ ret["temp"] = float(match.group(1)) / 10.0 | |
return ret | |
def ping(self, address): | |
"""Ping the provided network address""" | |
ret = None | |
if address is not None: | |
- out = self.shell(['ping', '-n', '-c3', '-i0.2', '-w5', address], silent=True) | |
+ out = self.shell( | |
+ ["ping", "-n", "-c3", "-i0.2", "-w5", address], silent=True | |
+ ) | |
if out is not None: | |
for line in out.splitlines(): | |
- match = re.search(r'^\s*rtt\s[^=]*=\s*([\d\.]*)', line) | |
+ match = re.search(r"^\s*rtt\s[^=]*=\s*([\d\.]*)", line) | |
if match: | |
ret = float(match.group(1)) | |
if ret is None: | |
- logging.debug('%s is unreachable', address) | |
+ logging.debug("%s is unreachable", address) | |
else: | |
- logging.debug('%s rtt %0.3f ms', address, ret) | |
+ logging.debug("%s rtt %0.3f ms", address, ret) | |
return ret | |
def is_installed(self, package): | |
"""See if the given package is installed""" | |
ret = False | |
- out = self.shell(['pm', 'list', 'packages'], silent=True) | |
+ out = self.shell(["pm", "list", "packages"], silent=True) | |
if out is not None: | |
for line in out.splitlines(): | |
if line.find(package) >= 0: | |
ret = True | |
break | |
@@ -305,311 +337,416 @@ | |
def cleanup_device(self): | |
"""Do some device-level cleanup""" | |
start = monotonic.monotonic() | |
# Simulate pressing the home button to dismiss any UI | |
- self.shell(['input', 'keyevent', '3']) | |
+ self.shell(["input", "keyevent", "3"]) | |
# Clear notifications | |
- self.shell(['settings', 'put', 'global', 'heads_up_notifications_enabled', '0']) | |
- self.su('service call notification 1') | |
+ self.shell(["settings", "put", "global", "heads_up_notifications_enabled", "0"]) | |
+ self.su("service call notification 1") | |
# Close some known apps that pop-over | |
for app in self.known_apps: | |
- if 'installed' not in self.known_apps[app]: | |
- out = self.shell(['dumpsys', 'package', app, '|', 'grep', 'versionName']) | |
- self.known_apps[app]['installed'] = bool(out is not None and len(out.strip())) | |
- if self.known_apps[app]['installed']: | |
- self.shell(['am', 'force-stop', app]) | |
+ if "installed" not in self.known_apps[app]: | |
+ out = self.shell( | |
+ ["dumpsys", "package", app, "|", "grep", "versionName"] | |
+ ) | |
+ self.known_apps[app]["installed"] = bool( | |
+ out is not None and len(out.strip()) | |
+ ) | |
+ if self.known_apps[app]["installed"]: | |
+ self.shell(["am", "force-stop", app]) | |
# Cleanup the downloads folders | |
- self.shell(['rm', '-rf', '/sdcard/Download/*', '/sdcard/Backucup', '/sdcard/UCDownloads', | |
- '/data/local/tmp/tcpdump.cap', '/data/local/tmp/wpt_video.mp4']) | |
- self.su('rm -rf /data/media/0/Download/* /data/media/0/Backucup '\ | |
- '/data/media/0/UCDownloads /data/data/com.UCMobile.intl/wa/sv/*') | |
+ self.shell( | |
+ [ | |
+ "rm", | |
+ "-rf", | |
+ "/sdcard/Download/*", | |
+ "/sdcard/Backucup", | |
+ "/sdcard/UCDownloads", | |
+ "/data/local/tmp/tcpdump.cap", | |
+ "/data/local/tmp/wpt_video.mp4", | |
+ ] | |
+ ) | |
+ self.su( | |
+ "rm -rf /data/media/0/Download/* /data/media/0/Backucup " | |
+ "/data/media/0/UCDownloads /data/data/com.UCMobile.intl/wa/sv/*" | |
+ ) | |
# Clean up some system apps that collect cruft | |
- self.shell(['pm', 'clear', 'com.android.providers.downloads']) | |
- self.shell(['pm', 'clear', 'com.google.android.googlequicksearchbox']) | |
- self.shell(['pm', 'clear', 'com.google.android.youtube']) | |
- self.shell(['pm', 'clear', 'com.motorola.motocare']) | |
+ self.shell(["pm", "clear", "com.android.providers.downloads"]) | |
+ self.shell(["pm", "clear", "com.google.android.googlequicksearchbox"]) | |
+ self.shell(["pm", "clear", "com.google.android.youtube"]) | |
+ self.shell(["pm", "clear", "com.motorola.motocare"]) | |
# in case busybox is installed, try a manual fstrim | |
- self.su('fstrim -v /data') | |
+ self.su("fstrim -v /data") | |
# See if there are any system dialogs that need dismissing | |
- out = self.shell(['dumpsys', 'window', 'windows'], silent=True) | |
- if re.search(r'Window #[^\n]*Application Error\:', out) is not None or \ | |
- re.search(r'Window #[^\n]*systemui\.usb\.UsbDebuggingActivity', out) is not None: | |
- logging.warning('Dismissing system dialog') | |
- self.shell(['input', 'keyevent', 'KEYCODE_DPAD_RIGHT'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_DPAD_RIGHT'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_ENTER'], silent=True) | |
- if out.find('com.google.android.apps.gsa.staticplugins.opa.errorui.OpaErrorActivity') >= 0: | |
- self.shell(['am', 'force-stop', 'com.google.android.googlequicksearchbox']) | |
- if out.find('com.motorola.ccc.ota/com.motorola.ccc.ota.ui.DownloadActivity') >= 0: | |
- self.shell(['am', 'force-stop', 'com.motorola.ccc.ota']) | |
+ out = self.shell(["dumpsys", "window", "windows"], silent=True) | |
+ if ( | |
+ re.search(r"Window #[^\n]*Application Error\:", out) is not None | |
+ or re.search(r"Window #[^\n]*systemui\.usb\.UsbDebuggingActivity", out) | |
+ is not None | |
+ ): | |
+ logging.warning("Dismissing system dialog") | |
+ self.shell(["input", "keyevent", "KEYCODE_DPAD_RIGHT"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_DPAD_RIGHT"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_ENTER"], silent=True) | |
+ if ( | |
+ out.find( | |
+ "com.google.android.apps.gsa.staticplugins.opa.errorui.OpaErrorActivity" | |
+ ) | |
+ >= 0 | |
+ ): | |
+ self.shell(["am", "force-stop", "com.google.android.googlequicksearchbox"]) | |
+ if ( | |
+ out.find("com.motorola.ccc.ota/com.motorola.ccc.ota.ui.DownloadActivity") | |
+ >= 0 | |
+ ): | |
+ self.shell(["am", "force-stop", "com.motorola.ccc.ota"]) | |
# reboot the phone and exit the agent if it is running EXTREMELY slowly | |
elapsed = monotonic.monotonic() - start | |
if elapsed > 300: | |
- logging.debug("Cleanup took %0.3f seconds. Rebooting the phone and restarting agent", | |
- elapsed) | |
- self.adb(['reboot']) | |
+ logging.debug( | |
+ "Cleanup took %0.3f seconds. Rebooting the phone and restarting agent", | |
+ elapsed, | |
+ ) | |
+ self.adb(["reboot"]) | |
self.needs_exit = True | |
- | |
def get_rndis_interface(self): | |
"""Return the name of the rndis interface, it's state and assigned address""" | |
interface = None | |
if_state = None | |
address = None | |
- out = self.shell(['ip', 'address', 'show'], silent=True) | |
+ out = self.shell(["ip", "address", "show"], silent=True) | |
need_address = False | |
if out is not None: | |
for line in out.splitlines(): | |
- match = re.search(r'[\d]+\:\s+([^:]+):[^\n]*state (\w+)', line) | |
+ match = re.search(r"[\d]+\:\s+([^:]+):[^\n]*state (\w+)", line) | |
if match: | |
need_address = False | |
iface = match.group(1) | |
- if iface == 'rndis0': | |
+ if iface == "rndis0": | |
interface = iface | |
if_state = match.group(2) | |
if_state = if_state.lower() | |
address = None | |
need_address = True | |
- elif interface is None and iface == 'usb0': | |
+ elif interface is None and iface == "usb0": | |
interface = iface | |
if_state = match.group(2) | |
if_state = if_state.lower() | |
address = None | |
need_address = True | |
elif need_address: | |
- match = re.search(r'^\s*inet ([\d\.]+)', line) | |
+ match = re.search(r"^\s*inet ([\d\.]+)", line) | |
if match: | |
address = match.group(1) | |
return interface, if_state, address | |
def check_rndis(self): | |
"""Bring up the rndis interface if it isn't up""" | |
rndis_ready = False | |
- is_dhcp = bool(self.rndis == 'dhcp') | |
+ is_dhcp = bool(self.rndis == "dhcp") | |
rndis_address = None | |
if not is_dhcp: | |
- match = re.search(r'^([\d\.]+\/\d+),([\d\.]+),([\d\.]+),([\d\.]+)', self.rndis) | |
+ match = re.search( | |
+ r"^([\d\.]+\/\d+),([\d\.]+),([\d\.]+),([\d\.]+)", self.rndis | |
+ ) | |
if match: | |
- rndis_address = {'addr': match.group(1), | |
- 'gateway': match.group(2), | |
- 'dns1': match.group(3), | |
- 'dns2': match.group(4)} | |
+ rndis_address = { | |
+ "addr": match.group(1), | |
+ "gateway": match.group(2), | |
+ "dns1": match.group(3), | |
+ "dns2": match.group(4), | |
+ } | |
else: | |
- logging.error('Invalid rndis address config: %s', self.rndis) | |
+ logging.error("Invalid rndis address config: %s", self.rndis) | |
interface, if_state, address = self.get_rndis_interface() | |
- if interface is not None and if_state == 'up' and address is not None: | |
+ if interface is not None and if_state == "up" and address is not None: | |
rndis_ready = True | |
elif is_dhcp or rndis_address is not None: | |
# Make sure the USB interface is configured for rndis | |
- out = self.shell(['getprop', 'sys.usb.config'], silent=True) | |
- if out.strip() != 'rndis,adb': | |
- logging.debug('Enabling rndis USB mode') | |
- self.su('setprop sys.usb.config rndis,adb') | |
- self.adb(['wait-for-device']) | |
+ out = self.shell(["getprop", "sys.usb.config"], silent=True) | |
+ if out.strip() != "rndis,adb": | |
+ logging.debug("Enabling rndis USB mode") | |
+ self.su("setprop sys.usb.config rndis,adb") | |
+ self.adb(["wait-for-device"]) | |
# Enable tethering (function is different depending on Android version) | |
- tether_function = '34' | |
+ tether_function = "34" | |
if self.short_version >= 6.0: | |
- tether_function = '41' if self.kernel == 'android-samsung' else '30' | |
+ tether_function = "41" if self.kernel == "android-samsung" else "30" | |
elif self.short_version >= 5.1: | |
- tether_function = '31' | |
+ tether_function = "31" | |
elif self.short_version >= 5.0: | |
- tether_function = '30' | |
+ tether_function = "30" | |
elif self.short_version >= 4.4: | |
- tether_function = '34' | |
+ tether_function = "34" | |
elif self.short_version >= 4.1: | |
- tether_function = '33' | |
+ tether_function = "33" | |
elif self.short_version >= 4.0: | |
- tether_function = '32' | |
- self.su('service call connectivity {0} i32 1'.format(tether_function)) | |
- self.adb(['wait-for-device']) | |
+ tether_function = "32" | |
+ self.su("service call connectivity {0} i32 1".format(tether_function)) | |
+ self.adb(["wait-for-device"]) | |
interface, if_state, address = self.get_rndis_interface() | |
if interface is not None: | |
- self.su('svc wifi disable') | |
+ self.su("svc wifi disable") | |
# turn down all of the other interfaces | |
- out = self.su('ip link show') | |
+ out = self.su("ip link show") | |
if out is not None: | |
for line in out: | |
- match = re.search(r'[\d]+\:\s+([^:]+):[^\n]*state (\w+)', line) | |
+ match = re.search(r"[\d]+\:\s+([^:]+):[^\n]*state (\w+)", line) | |
if match: | |
iface = match.group(1) | |
- if iface != interface and iface != 'lo' and iface[:4] != 'wlan': | |
- self.su('ip link set {0} down'.format(iface)) | |
+ if ( | |
+ iface != interface | |
+ and iface != "lo" | |
+ and iface[:4] != "wlan" | |
+ ): | |
+ self.su("ip link set {0} down".format(iface)) | |
if rndis_address is not None: | |
# Set up the address | |
- self.su('ip rule add from all lookup main') | |
- self.su('ip link set {0} down'.format(interface)) | |
- self.su('ip addr flush dev {0}'.format(interface)) | |
- self.su('ip addr add {0} dev {1}'.format(rndis_address['addr'], interface)) | |
- self.su('ip link set {0} up'.format(interface)) | |
+ self.su("ip rule add from all lookup main") | |
+ self.su("ip link set {0} down".format(interface)) | |
+ self.su("ip addr flush dev {0}".format(interface)) | |
+ self.su( | |
+ "ip addr add {0} dev {1}".format( | |
+ rndis_address["addr"], interface | |
+ ) | |
+ ) | |
+ self.su("ip link set {0} up".format(interface)) | |
# Set up the gateway | |
- self.su('route add -net 0.0.0.0 netmask 0.0.0.0 gw {0} dev {1}'.format( | |
- rndis_address['gateway'], interface)) | |
- self.su('setprop net.{0}.gw {1}'.format(interface, rndis_address['gateway'])) | |
- self.su('setprop net.{0}.gateway {1}'.format(interface, | |
- rndis_address['gateway'])) | |
+ self.su( | |
+ "route add -net 0.0.0.0 netmask 0.0.0.0 gw {0} dev {1}".format( | |
+ rndis_address["gateway"], interface | |
+ ) | |
+ ) | |
+ self.su( | |
+ "setprop net.{0}.gw {1}".format( | |
+ interface, rndis_address["gateway"] | |
+ ) | |
+ ) | |
+ self.su( | |
+ "setprop net.{0}.gateway {1}".format( | |
+ interface, rndis_address["gateway"] | |
+ ) | |
+ ) | |
# Configure DNS | |
- self.su('setprop net.dns1 {0}'.format(rndis_address['dns1'])) | |
- self.su('setprop net.dns2 {0}'.format(rndis_address['dns2'])) | |
- self.su('setprop net.{0}.dns1 {1}'.format(interface, rndis_address['dns1'])) | |
- self.su('setprop net.{0}.dns2 {1}'.format(interface, rndis_address['dns2'])) | |
- self.su('ndc resolver setifdns {0} {1} {2}'.format(interface, | |
- rndis_address['dns1'], | |
- rndis_address['dns2'])) | |
- self.su('ndc resolver setdefaultif {0}'.format(interface)) | |
+ self.su("setprop net.dns1 {0}".format(rndis_address["dns1"])) | |
+ self.su("setprop net.dns2 {0}".format(rndis_address["dns2"])) | |
+ self.su( | |
+ "setprop net.{0}.dns1 {1}".format( | |
+ interface, rndis_address["dns1"] | |
+ ) | |
+ ) | |
+ self.su( | |
+ "setprop net.{0}.dns2 {1}".format( | |
+ interface, rndis_address["dns2"] | |
+ ) | |
+ ) | |
+ self.su( | |
+ "ndc resolver setifdns {0} {1} {2}".format( | |
+ interface, rndis_address["dns1"], rndis_address["dns2"] | |
+ ) | |
+ ) | |
+ self.su("ndc resolver setdefaultif {0}".format(interface)) | |
# Misc settings | |
self.su('setprop "net.gprs.http-proxy" ""') | |
interface, if_state, address = self.get_rndis_interface() | |
- if interface is not None and if_state == 'up' and address is not None: | |
+ if ( | |
+ interface is not None | |
+ and if_state == "up" | |
+ and address is not None | |
+ ): | |
rndis_ready = True | |
elif is_dhcp: | |
- self.su('netcfg {0} dhcp'.format(interface)) | |
+ self.su("netcfg {0} dhcp".format(interface)) | |
return rndis_ready | |
def is_tun_interface_available(self): | |
"""Check to see if tun0 is up""" | |
is_ready = False | |
- out = self.shell(['ip', 'address', 'show'], silent=True) | |
+ out = self.shell(["ip", "address", "show"], silent=True) | |
if out is not None: | |
for line in out.splitlines(): | |
- if re.search(r'^[\d]+\:\s+tun0:', line): | |
+ if re.search(r"^[\d]+\:\s+tun0:", line): | |
is_ready = True | |
return is_ready | |
def dismiss_vpn_dialog(self): | |
"""Check and see if the VPN permission dialog is up and dismiss it""" | |
- out = self.shell(['dumpsys', 'window', 'windows'], silent=True) | |
- if out.find('com.motorola.ccc.ota/com.motorola.ccc.ota.ui.DownloadActivity') >= 0: | |
- self.shell(['am', 'force-stop', 'com.motorola.ccc.ota']) | |
- if out.find('com.android.vpndialogs/com.android.vpndialogs.ConfirmDialog') >= 0: | |
- logging.warning('Dismissing VPN dialog') | |
+ out = self.shell(["dumpsys", "window", "windows"], silent=True) | |
+ if ( | |
+ out.find("com.motorola.ccc.ota/com.motorola.ccc.ota.ui.DownloadActivity") | |
+ >= 0 | |
+ ): | |
+ self.shell(["am", "force-stop", "com.motorola.ccc.ota"]) | |
+ if out.find("com.android.vpndialogs/com.android.vpndialogs.ConfirmDialog") >= 0: | |
+ logging.warning("Dismissing VPN dialog") | |
if self.short_version < 5.0: | |
- self.shell(['input', 'keyevent', 'KEYCODE_DPAD_RIGHT'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_ENTER'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_DPAD_RIGHT'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_ENTER'], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_DPAD_RIGHT"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_ENTER"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_DPAD_RIGHT"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_ENTER"], silent=True) | |
else: | |
- self.shell(['input', 'keyevent', 'KEYCODE_TAB'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_TAB'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_TAB'], silent=True) | |
- self.shell(['input', 'keyevent', 'KEYCODE_ENTER'], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_TAB"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_TAB"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_TAB"], silent=True) | |
+ self.shell(["input", "keyevent", "KEYCODE_ENTER"], silent=True) | |
def reset_simplert(self): | |
"""Reset the tunnel on the phone in case it's state is messed up""" | |
- self.shell(['am', 'force-stop', 'com.viper.simplert']) | |
+ self.shell(["am", "force-stop", "com.viper.simplert"]) | |
def check_simplert(self): | |
"""Bring up the simple-rt bridge if it isn't running""" | |
is_ready = self.is_tun_interface_available() | |
if not is_ready: | |
# disconnect/reconnect the USB interface | |
- self.su('setprop sys.usb.config adb') | |
- self.adb(['wait-for-device']) | |
+ self.su("setprop sys.usb.config adb") | |
+ self.adb(["wait-for-device"]) | |
# wait up to 30 seconds for the interface to come up | |
end_time = monotonic.monotonic() + 30 | |
while not is_ready and monotonic.monotonic() < end_time: | |
time.sleep(1) | |
self.dismiss_vpn_dialog() | |
is_ready = self.is_tun_interface_available() | |
if not is_ready: | |
- logging.debug('simplert bridge not started') | |
+ logging.debug("simplert bridge not started") | |
return is_ready | |
def sudo(self, args): | |
"""Run the given sudo command and return""" | |
- args.insert(0, 'sudo') | |
- logging.debug(' '.join(args)) | |
+ args.insert(0, "sudo") | |
+ logging.debug(" ".join(args)) | |
return subprocess.call(args) | |
# pylint: disable=E1101 | |
def check_vpntether(self): | |
"""Install and bring up the vpn-reverse-tether bridge if necessary""" | |
is_ready = False | |
- if self.ping('172.31.0.1') is not None and self.is_tun_interface_available(): | |
+ if self.ping("172.31.0.1") is not None and self.is_tun_interface_available(): | |
is_ready = True | |
elif platform.system() == "Linux": | |
- interface, dns_server = self.options.vpntether.split(',', 1) | |
+ interface, dns_server = self.options.vpntether.split(",", 1) | |
if self.vpn_forwarder is not None: | |
try: | |
self.vpn_forwarder.write("\n") | |
time.sleep(0.5) | |
- subprocess.call(['sudo', 'killall', 'forwarder']) | |
+ subprocess.call(["sudo", "killall", "forwarder"]) | |
self.vpn_forwarder.close() | |
except Exception: | |
pass | |
self.vpn_forwarder = None | |
- self.shell(['am', 'force-stop', 'com.google.android.vpntether']) | |
- if not self.is_installed('com.google.android.vpntether'): | |
- apk = os.path.join(self.root_path, 'vpn-reverse-tether', 'Android', | |
- 'VpnReverseTether.apk') | |
- self.adb(['install', apk]) | |
+ self.shell(["am", "force-stop", "com.google.android.vpntether"]) | |
+ if not self.is_installed("com.google.android.vpntether"): | |
+ apk = os.path.join( | |
+ self.root_path, | |
+ "vpn-reverse-tether", | |
+ "Android", | |
+ "VpnReverseTether.apk", | |
+ ) | |
+ self.adb(["install", apk]) | |
# Set up the host for forwarding | |
- self.sudo(['ip', 'tuntap', 'add', 'dev', 'tun0', 'mode', 'tun']) | |
- self.sudo(['sysctl', '-w', 'net.ipv4.ip_forward=1']) | |
- self.sudo(['iptables', '-t', 'nat', '-F']) | |
- self.sudo(['iptables', '-t', 'nat', '-A', 'POSTROUTING', '-s', '172.31.0.0/24', | |
- '-o', interface, '-j', 'MASQUERADE']) | |
- self.sudo(['iptables', '-P', 'FORWARD', 'ACCEPT']) | |
- self.sudo(['ifconfig', 'tun0', '172.31.0.1', 'dstaddr', '172.31.0.2', | |
- 'mtu', '1500', 'up']) | |
- self.adb(['forward', 'tcp:7890', 'localabstract:vpntether']) | |
+ self.sudo(["ip", "tuntap", "add", "dev", "tun0", "mode", "tun"]) | |
+ self.sudo(["sysctl", "-w", "net.ipv4.ip_forward=1"]) | |
+ self.sudo(["iptables", "-t", "nat", "-F"]) | |
+ self.sudo( | |
+ [ | |
+ "iptables", | |
+ "-t", | |
+ "nat", | |
+ "-A", | |
+ "POSTROUTING", | |
+ "-s", | |
+ "172.31.0.0/24", | |
+ "-o", | |
+ interface, | |
+ "-j", | |
+ "MASQUERADE", | |
+ ] | |
+ ) | |
+ self.sudo(["iptables", "-P", "FORWARD", "ACCEPT"]) | |
+ self.sudo( | |
+ [ | |
+ "ifconfig", | |
+ "tun0", | |
+ "172.31.0.1", | |
+ "dstaddr", | |
+ "172.31.0.2", | |
+ "mtu", | |
+ "1500", | |
+ "up", | |
+ ] | |
+ ) | |
+ self.adb(["forward", "tcp:7890", "localabstract:vpntether"]) | |
self.cleanup_device() | |
# Start the tether app | |
- self.shell(['am', 'start', '-n', | |
- 'com.google.android.vpntether/vpntether.StartActivity', | |
- '-e', 'SOCKNAME', 'vpntether']) | |
- forwarder = os.path.join(self.root_path, 'vpn-reverse-tether') | |
- if os.uname()[4].startswith('arm'): | |
- forwarder = os.path.join(forwarder, 'arm') | |
- elif platform.architecture()[0] == '64bit': | |
- forwarder = os.path.join(forwarder, 'amd64') | |
- forwarder = os.path.join(forwarder, 'forwarder') | |
+ self.shell( | |
+ [ | |
+ "am", | |
+ "start", | |
+ "-n", | |
+ "com.google.android.vpntether/vpntether.StartActivity", | |
+ "-e", | |
+ "SOCKNAME", | |
+ "vpntether", | |
+ ] | |
+ ) | |
+ forwarder = os.path.join(self.root_path, "vpn-reverse-tether") | |
+ if os.uname()[4].startswith("arm"): | |
+ forwarder = os.path.join(forwarder, "arm") | |
+ elif platform.architecture()[0] == "64bit": | |
+ forwarder = os.path.join(forwarder, "amd64") | |
+ forwarder = os.path.join(forwarder, "forwarder") | |
# Give the app time to start before trying to connect to it | |
time.sleep(5) | |
self.dismiss_vpn_dialog() | |
- command = 'sudo "{0}" tun0 7890 -m 1500 -a 172.31.0.2 32 -d {1} -r 0.0.0.0 0'\ | |
- ' -n webpagetest'.format(forwarder, dns_server) | |
+ command = ( | |
+ 'sudo "{0}" tun0 7890 -m 1500 -a 172.31.0.2 32 -d {1} -r 0.0.0.0 0' | |
+ " -n webpagetest".format(forwarder, dns_server) | |
+ ) | |
logging.debug(command) | |
- self.vpn_forwarder = os.popen(command, 'w') | |
+ self.vpn_forwarder = os.popen(command, "w") | |
# Simulate pressing the home button to dismiss any UI | |
- self.shell(['input', 'keyevent', '3']) | |
+ self.shell(["input", "keyevent", "3"]) | |
return is_ready | |
+ | |
# pylint: enable=E1101 | |
def check_gnirehtet(self): | |
"""Install and bring up the gnirehtet bridge if necessary""" | |
is_ready = False | |
if self.is_tun_interface_available(): | |
is_ready = True | |
elif self.gnirehtet_exe is not None: | |
- interface, dns_server = self.options.gnirehtet.split(',', 1) | |
+ interface, dns_server = self.options.gnirehtet.split(",", 1) | |
if self.gnirehtet is not None: | |
try: | |
- subprocess.call([self.gnirehtet_exe, 'stop']) | |
+ subprocess.call([self.gnirehtet_exe, "stop"]) | |
self.gnirehtet.terminate() | |
self.gnirehtet.communicate() | |
self.gnirehtet = None | |
except Exception: | |
pass | |
self.gnirehtet = None | |
from .os_util import kill_all | |
+ | |
kill_all(os.path.basename(self.gnirehtet_exe), True) | |
- self.shell(['am', 'force-stop', 'com.genymobile.gnirehtet']) | |
- if not self.is_installed('com.genymobile.gnirehtet'): | |
- apk = os.path.join(self.root_path, 'gnirehtet', 'gnirehtet.apk') | |
- self.adb(['install', apk]) | |
+ self.shell(["am", "force-stop", "com.genymobile.gnirehtet"]) | |
+ if not self.is_installed("com.genymobile.gnirehtet"): | |
+ apk = os.path.join(self.root_path, "gnirehtet", "gnirehtet.apk") | |
+ self.adb(["install", apk]) | |
self.cleanup_device() | |
# Start tethering | |
- args = [self.gnirehtet_exe, 'run'] | |
- logging.debug(' '.join(args)) | |
+ args = [self.gnirehtet_exe, "run"] | |
+ logging.debug(" ".join(args)) | |
self.gnirehtet = subprocess.Popen(args) | |
# Give the app time to start before trying to connect to it | |
time.sleep(5) | |
self.dismiss_vpn_dialog() | |
# Simulate pressing the home button to dismiss any UI | |
- self.shell(['input', 'keyevent', '3']) | |
+ self.shell(["input", "keyevent", "3"]) | |
end = monotonic.monotonic() + 30 | |
while not is_ready and monotonic.monotonic() < end: | |
if self.is_tun_interface_available(): | |
is_ready = True | |
else: | |
@@ -619,86 +756,98 @@ | |
def is_device_ready(self): | |
"""Check to see if the device is ready to run tests""" | |
is_ready = True | |
if self.version is None: | |
# Turn down the volume (just one notch each time it is run) | |
- self.shell(['input', 'keyevent', '25']) | |
+ self.shell(["input", "keyevent", "25"]) | |
self.cleanup_device() | |
- out = self.shell(['getprop', 'ro.build.version.release'], silent=True) | |
+ out = self.shell(["getprop", "ro.build.version.release"], silent=True) | |
if out is not None: | |
- self.version = 'Android ' + out.strip() | |
- match = re.search(r'^(\d+\.\d+)', out) | |
+ self.version = "Android " + out.strip() | |
+ match = re.search(r"^(\d+\.\d+)", out) | |
if match: | |
self.short_version = float(match.group(1)) | |
- logging.debug('%s (%0.2f)', self.version, self.short_version) | |
+ logging.debug("%s (%0.2f)", self.version, self.short_version) | |
if self.version is None: | |
- logging.debug('Device not detected') | |
+ logging.debug("Device not detected") | |
return False | |
if self.kernel is None: | |
- out = self.shell(['getprop', 'ro.com.google.clientidbase'], silent=True) | |
+ out = self.shell(["getprop", "ro.com.google.clientidbase"], silent=True) | |
if out is not None: | |
self.kernel = out.strip() | |
battery = self.get_battery_stats() | |
logging.debug(battery) | |
- if 'level' in battery and battery['level'] < 50: | |
- logging.info("Device not ready, low battery: %d %%", battery['level']) | |
+ if "level" in battery and battery["level"] < 50: | |
+ logging.info("Device not ready, low battery: %d %%", battery["level"]) | |
is_ready = False | |
- if 'temp' in battery and battery['temp'] > 36.0: | |
- logging.info("Device not ready, high temperature: %0.1f degrees", battery['temp']) | |
+ if "temp" in battery and battery["temp"] > 36.0: | |
+ logging.info( | |
+ "Device not ready, high temperature: %0.1f degrees", battery["temp"] | |
+ ) | |
is_ready = False | |
# Bring up the bridged interface if necessary | |
if is_ready and self.rndis is not None: | |
is_ready = self.check_rndis() | |
if is_ready and self.options.simplert is not None: | |
is_ready = self.check_simplert() | |
if not is_ready: | |
self.no_network_count += 1 | |
- logging.debug("Networking unavailable - %d attempts to connect failed", | |
- self.no_network_count) | |
+ logging.debug( | |
+ "Networking unavailable - %d attempts to connect failed", | |
+ self.no_network_count, | |
+ ) | |
self.reset_simplert() | |
if is_ready and self.options.vpntether is not None: | |
is_ready = self.check_vpntether() | |
if not is_ready: | |
self.no_network_count += 1 | |
- logging.debug("Networking unavailable - %d attempts to connect failed", | |
- self.no_network_count) | |
+ logging.debug( | |
+ "Networking unavailable - %d attempts to connect failed", | |
+ self.no_network_count, | |
+ ) | |
if is_ready and self.options.gnirehtet is not None: | |
is_ready = self.check_gnirehtet() | |
if not is_ready: | |
self.no_network_count += 1 | |
- logging.debug("Networking unavailable - %d attempts to connect failed", | |
- self.no_network_count) | |
+ logging.debug( | |
+ "Networking unavailable - %d attempts to connect failed", | |
+ self.no_network_count, | |
+ ) | |
# Try pinging the network (prefer the gateway but fall back to DNS or 8.8.8.8) | |
if is_ready and self.options.gnirehtet is None: | |
net_ok = False | |
if self.ping(self.ping_address) is not None: | |
self.no_network_count = 0 | |
self.last_network_ok = monotonic.monotonic() | |
self.rebooted = False | |
net_ok = True | |
else: | |
addresses = [] | |
- props = self.shell(['getprop']) | |
+ props = self.shell(["getprop"]) | |
gateway = None | |
if props is not None: | |
for line in props.splitlines(): | |
- match = re.search(r'^\[net\.dns\d\]:\s+\[([^\]]*)\]', line) | |
+ match = re.search(r"^\[net\.dns\d\]:\s+\[([^\]]*)\]", line) | |
if match: | |
dns = match.group(1) | |
if dns not in addresses: | |
addresses.append(dns) | |
- match = re.search(r'^\[dhcp\.[^\.]+\.dns\d\]:\s+\[([^\]]*)\]', line) | |
+ match = re.search( | |
+ r"^\[dhcp\.[^\.]+\.dns\d\]:\s+\[([^\]]*)\]", line | |
+ ) | |
if match: | |
dns = match.group(1) | |
if dns not in addresses: | |
addresses.append(dns) | |
- match = re.search(r'^\[dhcp\.[^\.]+\.gateway\]:\s+\[([^\]]*)\]', line) | |
+ match = re.search( | |
+ r"^\[dhcp\.[^\.]+\.gateway\]:\s+\[([^\]]*)\]", line | |
+ ) | |
if match: | |
gateway = match.group(1) | |
if gateway is not None: | |
addresses.insert(0, gateway) | |
- addresses.append('8.8.8.8') | |
+ addresses.append("8.8.8.8") | |
for address in addresses: | |
if self.ping(address) is not None: | |
self.ping_address = address | |
net_ok = True | |
break | |
@@ -719,73 +868,77 @@ | |
needs_kick = True | |
elif self.no_network_count > 1 and elapsed > 1800: | |
needs_kick = True | |
if needs_kick: | |
if self.rebooted: | |
- logging.debug("Flagging for exit - %d attempts to connect failed", | |
- self.no_network_count) | |
+ logging.debug( | |
+ "Flagging for exit - %d attempts to connect failed", | |
+ self.no_network_count, | |
+ ) | |
self.needs_exit = True | |
else: | |
- logging.debug("Rebooting device - %d attempts to connect failed", | |
- self.no_network_count) | |
+ logging.debug( | |
+ "Rebooting device - %d attempts to connect failed", | |
+ self.no_network_count, | |
+ ) | |
self.rebooted = True | |
- self.adb(['reboot']) | |
- self.adb(['wait-for-device']) | |
+ self.adb(["reboot"]) | |
+ self.adb(["wait-for-device"]) | |
self.no_network_count = 0 | |
if is_ready and not self.initialized: | |
self.initialized = True | |
# Disable emergency alert notifications | |
- self.su('pm disable com.android.cellbroadcastreceiver') | |
+ self.su("pm disable com.android.cellbroadcastreceiver") | |
return is_ready | |
def get_jiffies_time(self): | |
"""Get the uptime in nanoseconds and jiffies for hz calculation""" | |
- out = self.shell(['cat', '/proc/timer_list'], silent=True) | |
+ out = self.shell(["cat", "/proc/timer_list"], silent=True) | |
nsecs = None | |
jiffies = None | |
if out is not None: | |
for line in out.splitlines(): | |
if nsecs is None: | |
- match = re.search(r'^now at (\d+) nsecs', line) | |
+ match = re.search(r"^now at (\d+) nsecs", line) | |
if match: | |
nsecs = int(match.group(1)) | |
if jiffies is None: | |
- match = re.search(r'^jiffies:\s+(\d+)', line) | |
+ match = re.search(r"^jiffies:\s+(\d+)", line) | |
if match: | |
jiffies = int(match.group(1)) | |
return nsecs, jiffies | |
def get_bytes_rx(self): | |
"""Get the incremental bytes received across all non-loopback interfaces""" | |
bytes_rx = 0 | |
- out = self.shell(['cat', '/proc/net/dev'], silent=True) | |
+ out = self.shell(["cat", "/proc/net/dev"], silent=True) | |
if out is not None: | |
for line in out.splitlines(): | |
- match = re.search(r'^\s*(\w+):\s+(\d+)', line) | |
+ match = re.search(r"^\s*(\w+):\s+(\d+)", line) | |
if match: | |
interface = match.group(1) | |
- if interface != 'lo': | |
+ if interface != "lo": | |
bytes_rx += int(match.group(2)) | |
delta = bytes_rx - self.last_bytes_rx | |
self.last_bytes_rx = bytes_rx | |
return delta | |
def get_video_size(self): | |
"""Get the current size of the video file""" | |
size = 0 | |
- out = self.shell(['ls', '-l', '/data/local/tmp/wpt_video.mp4'], silent=True) | |
- match = re.search(r'[^\d]+\s+(\d+) \d+', out) | |
+ out = self.shell(["ls", "-l", "/data/local/tmp/wpt_video.mp4"], silent=True) | |
+ match = re.search(r"[^\d]+\s+(\d+) \d+", out) | |
if match: | |
size = int(match.group(1)) | |
return size | |
def screenshot(self, dest_file, mogrify): | |
"""Capture a png screenshot of the device""" | |
- device_path = '/data/local/tmp/wpt_screenshot.png' | |
- self.shell(['rm', '/data/local/tmp/wpt_screenshot.png'], silent=True) | |
- self.shell(['screencap', '-p', device_path]) | |
- self.adb(['pull', device_path, dest_file]) | |
+ device_path = "/data/local/tmp/wpt_screenshot.png" | |
+ self.shell(["rm", "/data/local/tmp/wpt_screenshot.png"], silent=True) | |
+ self.shell(["screencap", "-p", device_path]) | |
+ self.adb(["pull", device_path, dest_file]) | |
if os.path.isfile(dest_file): | |
orientation = self.get_orientation() | |
rotation = [0, 270, 180, 90] | |
if orientation > 0 and orientation < 4: | |
angle = rotation[orientation] | |
@@ -794,26 +947,26 @@ | |
subprocess.call(command, shell=True) | |
def get_orientation(self): | |
"""Get the device orientation""" | |
orientation = 0 | |
- out = self.shell(['dumpsys', 'input'], silent=True) | |
- match = re.search(r'SurfaceOrientation: ([\d])', out) | |
+ out = self.shell(["dumpsys", "input"], silent=True) | |
+ match = re.search(r"SurfaceOrientation: ([\d])", out) | |
if match: | |
orientation = int(match.group(1)) | |
return orientation | |
def get_package_version(self, package): | |
"""Get the version number of the given package""" | |
version = None | |
- out = self.shell(['dumpsys', 'package', package, '|', 'grep', 'versionName']) | |
+ out = self.shell(["dumpsys", "package", package, "|", "grep", "versionName"]) | |
if out is not None: | |
for line in out.splitlines(): | |
- separator = line.find('=') | |
+ separator = line.find("=") | |
if separator > -1: | |
- ver = line[separator + 1:].strip() | |
+ ver = line[separator + 1 :].strip() | |
if len(ver): | |
version = ver | |
- logging.debug('Package version for %s is %s', package, version) | |
+ logging.debug("Package version for %s is %s", package, version) | |
break | |
return version | |
--- internal/devtools_browser.py 2019-01-08 01:37:06.390282 +0000 | |
+++ internal/devtools_browser.py 2019-02-06 17:08:27.356167 +0000 | |
@@ -16,10 +16,11 @@ | |
from .optimization_checks import OptimizationChecks | |
class DevtoolsBrowser(object): | |
"""Devtools Browser base""" | |
+ | |
CONNECT_TIME_LIMIT = 120 | |
def __init__(self, options, job, use_devtools_video=True): | |
self.options = options | |
self.job = job | |
@@ -29,174 +30,205 @@ | |
self.browser_version = None | |
self.device_pixel_ratio = None | |
self.use_devtools_video = use_devtools_video | |
self.lighthouse_command = None | |
self.devtools_screenshot = True | |
- self.support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'support') | |
- self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'js') | |
+ self.support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
+ self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "js") | |
def connect(self, task): | |
"""Connect to the dev tools interface""" | |
ret = False | |
from internal.devtools import DevTools | |
+ | |
self.devtools = DevTools(self.options, self.job, task, self.use_devtools_video) | |
- if task['running_lighthouse']: | |
+ if task["running_lighthouse"]: | |
ret = self.devtools.wait_for_available(self.CONNECT_TIME_LIMIT) | |
else: | |
if self.devtools.connect(self.CONNECT_TIME_LIMIT): | |
logging.debug("Devtools connected") | |
ret = True | |
else: | |
- task['error'] = "Error connecting to dev tools interface" | |
- logging.critical(task['error']) | |
+ task["error"] = "Error connecting to dev tools interface" | |
+ logging.critical(task["error"]) | |
self.devtools = None | |
return ret | |
def disconnect(self): | |
"""Disconnect from dev tools""" | |
if self.devtools is not None: | |
# Always navigate to about:blank after finishing in case the tab is | |
# remembered across sessions | |
- if self.task is not None and self.task['error'] is None: | |
- self.devtools.send_command('Page.navigate', {'url': 'about:blank'}, wait=True) | |
+ if self.task is not None and self.task["error"] is None: | |
+ self.devtools.send_command( | |
+ "Page.navigate", {"url": "about:blank"}, wait=True | |
+ ) | |
self.devtools.close() | |
self.devtools = None | |
def prepare_browser(self, task): | |
"""Prepare the running browser (mobile emulation, UA string, etc""" | |
if self.devtools is not None: | |
# Figure out the native viewport size | |
if not self.options.android: | |
- size = self.devtools.execute_js("[window.innerWidth, window.innerHeight]") | |
+ size = self.devtools.execute_js( | |
+ "[window.innerWidth, window.innerHeight]" | |
+ ) | |
if size is not None and len(size) == 2: | |
- task['actual_viewport'] = {"width": size[0], "height": size[1]} | |
+ task["actual_viewport"] = {"width": size[0], "height": size[1]} | |
# Get the native device pixel ratio | |
if self.device_pixel_ratio is None: | |
self.device_pixel_ratio = 1.0 | |
try: | |
- ratio = self.devtools.execute_js('window.devicePixelRatio') | |
+ ratio = self.devtools.execute_js("window.devicePixelRatio") | |
if ratio is not None: | |
self.device_pixel_ratio = max(1.0, float(ratio)) | |
except Exception: | |
pass | |
# Clear the caches | |
- if not task['cached']: | |
- self.devtools.send_command("Network.clearBrowserCache", {}, | |
- wait=True) | |
- self.devtools.send_command("Network.clearBrowserCookies", {}, | |
- wait=True) | |
+ if not task["cached"]: | |
+ self.devtools.send_command("Network.clearBrowserCache", {}, wait=True) | |
+ self.devtools.send_command("Network.clearBrowserCookies", {}, wait=True) | |
# Mobile Emulation | |
- if not self.options.android and \ | |
- 'mobile' in self.job and self.job['mobile'] and \ | |
- 'width' in self.job and 'height' in self.job and \ | |
- 'dpr' in self.job: | |
- self.devtools.send_command("Emulation.setTouchEmulationEnabled", | |
- {"enabled": True, | |
- "configuration": "mobile"}, | |
- wait=True) | |
- self.devtools.send_command("Emulation.setScrollbarsHidden", | |
- {"hidden": True}, | |
- wait=True) | |
- if not self.options.throttle and 'throttle_cpu' in self.job: | |
- logging.debug('CPU Throttle target: %0.3fx', self.job['throttle_cpu']) | |
- if self.job['throttle_cpu'] > 1: | |
- self.devtools.send_command("Emulation.setCPUThrottlingRate", | |
- {"rate": self.job['throttle_cpu']}, | |
- wait=True) | |
- width = int(re.search(r'\d+', str(self.job['width'])).group()) | |
- height = int(re.search(r'\d+', str(self.job['height'])).group()) | |
- self.devtools.send_command("Emulation.setDeviceMetricsOverride", | |
- {"width": width, | |
- "height": height, | |
- "screenWidth": width, | |
- "screenHeight": height, | |
- "scale": 1, | |
- "positionX": 0, | |
- "positionY": 0, | |
- "deviceScaleFactor": float(self.job['dpr']), | |
- "mobile": True, | |
- "screenOrientation": | |
- {"angle": 0, "type": "portraitPrimary"}}, | |
- wait=True) | |
+ if ( | |
+ not self.options.android | |
+ and "mobile" in self.job | |
+ and self.job["mobile"] | |
+ and "width" in self.job | |
+ and "height" in self.job | |
+ and "dpr" in self.job | |
+ ): | |
+ self.devtools.send_command( | |
+ "Emulation.setTouchEmulationEnabled", | |
+ {"enabled": True, "configuration": "mobile"}, | |
+ wait=True, | |
+ ) | |
+ self.devtools.send_command( | |
+ "Emulation.setScrollbarsHidden", {"hidden": True}, wait=True | |
+ ) | |
+ if not self.options.throttle and "throttle_cpu" in self.job: | |
+ logging.debug( | |
+ "CPU Throttle target: %0.3fx", self.job["throttle_cpu"] | |
+ ) | |
+ if self.job["throttle_cpu"] > 1: | |
+ self.devtools.send_command( | |
+ "Emulation.setCPUThrottlingRate", | |
+ {"rate": self.job["throttle_cpu"]}, | |
+ wait=True, | |
+ ) | |
+ width = int(re.search(r"\d+", str(self.job["width"])).group()) | |
+ height = int(re.search(r"\d+", str(self.job["height"])).group()) | |
+ self.devtools.send_command( | |
+ "Emulation.setDeviceMetricsOverride", | |
+ { | |
+ "width": width, | |
+ "height": height, | |
+ "screenWidth": width, | |
+ "screenHeight": height, | |
+ "scale": 1, | |
+ "positionX": 0, | |
+ "positionY": 0, | |
+ "deviceScaleFactor": float(self.job["dpr"]), | |
+ "mobile": True, | |
+ "screenOrientation": {"angle": 0, "type": "portraitPrimary"}, | |
+ }, | |
+ wait=True, | |
+ ) | |
# Location | |
- if 'lat' in self.job and 'lng' in self.job: | |
+ if "lat" in self.job and "lng" in self.job: | |
try: | |
- lat = float(str(self.job['lat'])) | |
- lng = float(str(self.job['lng'])) | |
+ lat = float(str(self.job["lat"])) | |
+ lng = float(str(self.job["lng"])) | |
self.devtools.send_command( | |
- 'Emulation.setGeolocationOverride', | |
- {'latitude': lat, 'longitude': lng, | |
- 'accuracy': 0}) | |
+ "Emulation.setGeolocationOverride", | |
+ {"latitude": lat, "longitude": lng, "accuracy": 0}, | |
+ ) | |
except Exception: | |
pass | |
# UA String | |
ua_string = self.devtools.execute_js("navigator.userAgent") | |
if ua_string is not None: | |
- match = re.search(r'Chrome\/(\d+\.\d+\.\d+\.\d+)', ua_string) | |
+ match = re.search(r"Chrome\/(\d+\.\d+\.\d+\.\d+)", ua_string) | |
if match: | |
self.browser_version = match.group(1) | |
- if 'uastring' in self.job: | |
- ua_string = self.job['uastring'] | |
- if ua_string is not None and 'AppendUA' in task: | |
- ua_string += ' ' + task['AppendUA'] | |
+ if "uastring" in self.job: | |
+ ua_string = self.job["uastring"] | |
+ if ua_string is not None and "AppendUA" in task: | |
+ ua_string += " " + task["AppendUA"] | |
if ua_string is not None: | |
- self.job['user_agent_string'] = ua_string | |
+ self.job["user_agent_string"] = ua_string | |
# Disable js | |
- if self.job['noscript']: | |
- self.devtools.send_command("Emulation.setScriptExecutionDisabled", | |
- {"value": True}, wait=True) | |
+ if self.job["noscript"]: | |
+ self.devtools.send_command( | |
+ "Emulation.setScriptExecutionDisabled", {"value": True}, wait=True | |
+ ) | |
self.devtools.prepare_browser() | |
def on_start_recording(self, task): | |
"""Start recording""" | |
- task['page_data'] = {'date': time.time()} | |
- task['page_result'] = None | |
- task['run_start_time'] = monotonic.monotonic() | |
- if self.browser_version is not None and 'browserVersion' not in task['page_data']: | |
- task['page_data']['browserVersion'] = self.browser_version | |
- task['page_data']['browser_version'] = self.browser_version | |
- if not self.options.throttle and 'throttle_cpu' in self.job: | |
- task['page_data']['throttle_cpu_requested'] = self.job['throttle_cpu_requested'] | |
- if self.job['throttle_cpu'] > 1: | |
- task['page_data']['throttle_cpu'] = self.job['throttle_cpu'] | |
+ task["page_data"] = {"date": time.time()} | |
+ task["page_result"] = None | |
+ task["run_start_time"] = monotonic.monotonic() | |
+ if ( | |
+ self.browser_version is not None | |
+ and "browserVersion" not in task["page_data"] | |
+ ): | |
+ task["page_data"]["browserVersion"] = self.browser_version | |
+ task["page_data"]["browser_version"] = self.browser_version | |
+ if not self.options.throttle and "throttle_cpu" in self.job: | |
+ task["page_data"]["throttle_cpu_requested"] = self.job[ | |
+ "throttle_cpu_requested" | |
+ ] | |
+ if self.job["throttle_cpu"] > 1: | |
+ task["page_data"]["throttle_cpu"] = self.job["throttle_cpu"] | |
if self.devtools is not None: | |
self.devtools.start_recording() | |
def on_stop_capture(self, task): | |
"""Do any quick work to stop things that are capturing data""" | |
if self.devtools is not None: | |
self.devtools.stop_capture() | |
- if 'heroElementTimes' in self.job and self.job['heroElementTimes']: | |
+ if "heroElementTimes" in self.job and self.job["heroElementTimes"]: | |
hero_elements = None | |
custom_hero_selectors = {} | |
- if 'heroElements' in self.job: | |
- custom_hero_selectors = self.job['heroElements'] | |
- with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: | |
+ if "heroElements" in self.job: | |
+ custom_hero_selectors = self.job["heroElements"] | |
+ with open( | |
+ os.path.join(self.script_dir, "hero_elements.js"), "rb" | |
+ ) as script_file: | |
hero_elements_script = script_file.read() | |
- script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' | |
+ script = ( | |
+ hero_elements_script + "(" + json.dumps(custom_hero_selectors) + ")" | |
+ ) | |
hero_elements = self.devtools.execute_js(script) | |
if hero_elements is not None: | |
- path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join( | |
+ task["dir"], task["prefix"] + "_hero_elements.json.gz" | |
+ ) | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(hero_elements)) | |
def on_stop_recording(self, task): | |
"""Stop recording""" | |
if self.devtools is not None: | |
self.devtools.collect_trace() | |
if self.devtools_screenshot: | |
- if self.job['pngScreenShot']: | |
- screen_shot = os.path.join(task['dir'], | |
- task['prefix'] + '_screen.png') | |
+ if self.job["pngScreenShot"]: | |
+ screen_shot = os.path.join( | |
+ task["dir"], task["prefix"] + "_screen.png" | |
+ ) | |
self.devtools.grab_screenshot(screen_shot, png=True) | |
else: | |
- screen_shot = os.path.join(task['dir'], | |
- task['prefix'] + '_screen.jpg') | |
+ screen_shot = os.path.join( | |
+ task["dir"], task["prefix"] + "_screen.jpg" | |
+ ) | |
self.devtools.grab_screenshot(screen_shot, png=False, resize=600) | |
# Collect end of test data from the browser | |
self.collect_browser_metrics(task) | |
# Stop recording dev tools (which also collects the trace) | |
self.devtools.stop_recording() | |
@@ -204,46 +236,49 @@ | |
def run_task(self, task): | |
"""Run an individual test""" | |
if self.devtools is not None: | |
self.task = task | |
logging.debug("Running test") | |
- end_time = monotonic.monotonic() + task['test_time_limit'] | |
- task['current_step'] = 1 | |
+ end_time = monotonic.monotonic() + task["test_time_limit"] | |
+ task["current_step"] = 1 | |
recording = False | |
- while len(task['script']) and task['error'] is None and \ | |
- monotonic.monotonic() < end_time: | |
+ while ( | |
+ len(task["script"]) | |
+ and task["error"] is None | |
+ and monotonic.monotonic() < end_time | |
+ ): | |
self.prepare_task(task) | |
- command = task['script'].pop(0) | |
- if not recording and command['record']: | |
+ command = task["script"].pop(0) | |
+ if not recording and command["record"]: | |
recording = True | |
self.on_start_recording(task) | |
self.process_command(command) | |
- if command['record']: | |
+ if command["record"]: | |
self.devtools.wait_for_page_load() | |
- if not task['combine_steps'] or not len(task['script']): | |
+ if not task["combine_steps"] or not len(task["script"]): | |
self.on_stop_capture(task) | |
self.on_stop_recording(task) | |
recording = False | |
self.on_start_processing(task) | |
self.wait_for_processing(task) | |
self.process_devtools_requests(task) | |
self.step_complete(task) | |
- if task['log_data']: | |
+ if task["log_data"]: | |
# Move on to the next step | |
- task['current_step'] += 1 | |
+ task["current_step"] += 1 | |
self.event_name = None | |
- task['navigated'] = True | |
+ task["navigated"] = True | |
self.task = None | |
def on_start_processing(self, task): | |
"""Start any processing of the captured data""" | |
- if task['log_data']: | |
+ if task["log_data"]: | |
# Start the processing that can run in a background thread | |
optimization = OptimizationChecks(self.job, task, self.get_requests()) | |
optimization.start() | |
# Run the video post-processing | |
- if self.use_devtools_video and self.job['video']: | |
+ if self.use_devtools_video and self.job["video"]: | |
self.process_video() | |
self.wappalyzer_detect(task, self.devtools.main_request_headers) | |
# wait for the background optimization checks | |
optimization.join() | |
@@ -258,182 +293,202 @@ | |
ret = self.devtools.execute_js(script) | |
return ret | |
def prepare_task(self, task): | |
"""Format the file prefixes for multi-step testing""" | |
- if task['current_step'] == 1: | |
- task['prefix'] = task['task_prefix'] | |
- task['video_subdirectory'] = task['task_video_prefix'] | |
+ if task["current_step"] == 1: | |
+ task["prefix"] = task["task_prefix"] | |
+ task["video_subdirectory"] = task["task_video_prefix"] | |
else: | |
- task['prefix'] = '{0}_{1:d}'.format(task['task_prefix'], task['current_step']) | |
- task['video_subdirectory'] = '{0}_{1:d}'.format(task['task_video_prefix'], | |
- task['current_step']) | |
- if task['video_subdirectory'] not in task['video_directories']: | |
- task['video_directories'].append(task['video_subdirectory']) | |
+ task["prefix"] = "{0}_{1:d}".format( | |
+ task["task_prefix"], task["current_step"] | |
+ ) | |
+ task["video_subdirectory"] = "{0}_{1:d}".format( | |
+ task["task_video_prefix"], task["current_step"] | |
+ ) | |
+ if task["video_subdirectory"] not in task["video_directories"]: | |
+ task["video_directories"].append(task["video_subdirectory"]) | |
if self.event_name is not None: | |
- task['step_name'] = self.event_name | |
+ task["step_name"] = self.event_name | |
else: | |
- task['step_name'] = 'Step_{0:d}'.format(task['current_step']) | |
+ task["step_name"] = "Step_{0:d}".format(task["current_step"]) | |
def process_video(self): | |
"""Post process the video""" | |
from internal.video_processing import VideoProcessing | |
+ | |
video = VideoProcessing(self.options, self.job, self.task) | |
video.process() | |
def process_devtools_requests(self, task): | |
"""Process the devtools log and pull out the requests information""" | |
- path_base = os.path.join(self.task['dir'], self.task['prefix']) | |
- devtools_file = path_base + '_devtools.json.gz' | |
+ path_base = os.path.join(self.task["dir"], self.task["prefix"]) | |
+ devtools_file = path_base + "_devtools.json.gz" | |
if os.path.isfile(devtools_file): | |
from internal.support.devtools_parser import DevToolsParser | |
- out_file = path_base + '_devtools_requests.json.gz' | |
- options = {'devtools': devtools_file, 'cached': task['cached'], 'out': out_file} | |
- netlog = path_base + '_netlog_requests.json.gz' | |
- options['netlog'] = netlog if os.path.isfile(netlog) else None | |
- optimization = path_base + '_optimization.json.gz' | |
- options['optimization'] = optimization if os.path.isfile(optimization) else None | |
- user_timing = path_base + '_user_timing.json.gz' | |
- options['user'] = user_timing if os.path.isfile(user_timing) else None | |
- coverage = path_base + '_coverage.json.gz' | |
- options['coverage'] = coverage if os.path.isfile(coverage) else None | |
- cpu = path_base + '_timeline_cpu.json.gz' | |
- options['cpu'] = cpu if os.path.isfile(cpu) else None | |
+ | |
+ out_file = path_base + "_devtools_requests.json.gz" | |
+ options = { | |
+ "devtools": devtools_file, | |
+ "cached": task["cached"], | |
+ "out": out_file, | |
+ } | |
+ netlog = path_base + "_netlog_requests.json.gz" | |
+ options["netlog"] = netlog if os.path.isfile(netlog) else None | |
+ optimization = path_base + "_optimization.json.gz" | |
+ options["optimization"] = ( | |
+ optimization if os.path.isfile(optimization) else None | |
+ ) | |
+ user_timing = path_base + "_user_timing.json.gz" | |
+ options["user"] = user_timing if os.path.isfile(user_timing) else None | |
+ coverage = path_base + "_coverage.json.gz" | |
+ options["coverage"] = coverage if os.path.isfile(coverage) else None | |
+ cpu = path_base + "_timeline_cpu.json.gz" | |
+ options["cpu"] = cpu if os.path.isfile(cpu) else None | |
parser = DevToolsParser(options) | |
parser.process() | |
# Cleanup intermediate files that are not needed | |
- if 'debug' not in self.job or not self.job['debug']: | |
+ if "debug" not in self.job or not self.job["debug"]: | |
if os.path.isfile(netlog): | |
os.remove(netlog) | |
if os.path.isfile(optimization): | |
os.remove(optimization) | |
if os.path.isfile(coverage): | |
os.remove(coverage) | |
if os.path.isfile(devtools_file): | |
os.remove(devtools_file) | |
- if 'page_data' in parser.result and 'result' in parser.result['page_data']: | |
- self.task['page_result'] = parser.result['page_data']['result'] | |
+ if "page_data" in parser.result and "result" in parser.result["page_data"]: | |
+ self.task["page_result"] = parser.result["page_data"]["result"] | |
def run_js_file(self, file_name): | |
"""Execute one of our js scripts""" | |
ret = None | |
script = None | |
script_file_path = os.path.join(self.script_dir, file_name) | |
if os.path.isfile(script_file_path): | |
- with open(script_file_path, 'rb') as script_file: | |
+ with open(script_file_path, "rb") as script_file: | |
script = script_file.read() | |
if script is not None: | |
ret = self.devtools.execute_js(script) | |
return ret | |
def collect_browser_metrics(self, task): | |
"""Collect all of the in-page browser metrics that we need""" | |
- user_timing = self.run_js_file('user_timing.js') | |
+ user_timing = self.run_js_file("user_timing.js") | |
if user_timing is not None: | |
- path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join(task["dir"], task["prefix"] + "_timed_events.json.gz") | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(user_timing)) | |
- page_data = self.run_js_file('page_data.js') | |
+ page_data = self.run_js_file("page_data.js") | |
if page_data is not None: | |
- task['page_data'].update(page_data) | |
- if 'customMetrics' in self.job: | |
+ task["page_data"].update(page_data) | |
+ if "customMetrics" in self.job: | |
custom_metrics = {} | |
- for name in self.job['customMetrics']: | |
- script = 'var wptCustomMetric = function() {' +\ | |
- self.job['customMetrics'][name] +\ | |
- '};try{wptCustomMetric();}catch(e){};' | |
+ for name in self.job["customMetrics"]: | |
+ script = ( | |
+ "var wptCustomMetric = function() {" | |
+ + self.job["customMetrics"][name] | |
+ + "};try{wptCustomMetric();}catch(e){};" | |
+ ) | |
custom_metrics[name] = self.devtools.execute_js(script) | |
- path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join(task["dir"], task["prefix"] + "_metrics.json.gz") | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(custom_metrics)) | |
def process_command(self, command): | |
"""Process an individual script command""" | |
logging.debug("Processing script command:") | |
logging.debug(command) | |
- if command['command'] == 'navigate': | |
- self.task['page_data']['URL'] = command['target'] | |
- url = str(command['target']).replace('"', '\"') | |
+ if command["command"] == "navigate": | |
+ self.task["page_data"]["URL"] = command["target"] | |
+ url = str(command["target"]).replace('"', '"') | |
script = 'window.location="{0}";'.format(url) | |
script = self.prepare_script_for_record(script) | |
self.devtools.start_navigating() | |
self.devtools.execute_js(script) | |
- elif command['command'] == 'logdata': | |
- self.task['combine_steps'] = False | |
- if int(re.search(r'\d+', str(command['target'])).group()): | |
+ elif command["command"] == "logdata": | |
+ self.task["combine_steps"] = False | |
+ if int(re.search(r"\d+", str(command["target"])).group()): | |
logging.debug("Data logging enabled") | |
- self.task['log_data'] = True | |
+ self.task["log_data"] = True | |
else: | |
logging.debug("Data logging disabled") | |
- self.task['log_data'] = False | |
- elif command['command'] == 'combinesteps': | |
- self.task['log_data'] = True | |
- self.task['combine_steps'] = True | |
- elif command['command'] == 'seteventname': | |
- self.event_name = command['target'] | |
- elif command['command'] == 'exec': | |
- script = command['target'] | |
- if command['record']: | |
+ self.task["log_data"] = False | |
+ elif command["command"] == "combinesteps": | |
+ self.task["log_data"] = True | |
+ self.task["combine_steps"] = True | |
+ elif command["command"] == "seteventname": | |
+ self.event_name = command["target"] | |
+ elif command["command"] == "exec": | |
+ script = command["target"] | |
+ if command["record"]: | |
script = self.prepare_script_for_record(script) | |
self.devtools.start_navigating() | |
self.devtools.execute_js(script) | |
- elif command['command'] == 'sleep': | |
- delay = min(60, max(0, int(re.search(r'\d+', str(command['target'])).group()))) | |
+ elif command["command"] == "sleep": | |
+ delay = min( | |
+ 60, max(0, int(re.search(r"\d+", str(command["target"])).group())) | |
+ ) | |
if delay > 0: | |
time.sleep(delay) | |
- elif command['command'] == 'setabm': | |
- self.task['stop_at_onload'] = bool('target' in command and | |
- int(re.search(r'\d+', | |
- str(command['target'])).group()) == 0) | |
- elif command['command'] == 'setactivitytimeout': | |
- if 'target' in command: | |
- milliseconds = int(re.search(r'\d+', str(command['target'])).group()) | |
- self.task['activity_time'] = max(0, min(30, float(milliseconds) / 1000.0)) | |
- elif command['command'] == 'setuseragent': | |
- self.task['user_agent_string'] = command['target'] | |
- elif command['command'] == 'setcookie': | |
- if 'target' in command and 'value' in command: | |
- url = command['target'].strip() | |
- cookie = command['value'] | |
- pos = cookie.find(';') | |
+ elif command["command"] == "setabm": | |
+ self.task["stop_at_onload"] = bool( | |
+ "target" in command | |
+ and int(re.search(r"\d+", str(command["target"])).group()) == 0 | |
+ ) | |
+ elif command["command"] == "setactivitytimeout": | |
+ if "target" in command: | |
+ milliseconds = int(re.search(r"\d+", str(command["target"])).group()) | |
+ self.task["activity_time"] = max( | |
+ 0, min(30, float(milliseconds) / 1000.0) | |
+ ) | |
+ elif command["command"] == "setuseragent": | |
+ self.task["user_agent_string"] = command["target"] | |
+ elif command["command"] == "setcookie": | |
+ if "target" in command and "value" in command: | |
+ url = command["target"].strip() | |
+ cookie = command["value"] | |
+ pos = cookie.find(";") | |
if pos > 0: | |
cookie = cookie[:pos] | |
- pos = cookie.find('=') | |
+ pos = cookie.find("=") | |
if pos > 0: | |
name = cookie[:pos].strip() | |
- value = cookie[pos + 1:].strip() | |
+ value = cookie[pos + 1 :].strip() | |
if len(name) and len(value) and len(url): | |
- self.devtools.send_command('Network.setCookie', | |
- {'url': url, 'name': name, 'value': value}) | |
- elif command['command'] == 'setlocation': | |
+ self.devtools.send_command( | |
+ "Network.setCookie", | |
+ {"url": url, "name": name, "value": value}, | |
+ ) | |
+ elif command["command"] == "setlocation": | |
try: | |
- if 'target' in command and command['target'].find(',') > 0: | |
+ if "target" in command and command["target"].find(",") > 0: | |
accuracy = 0 | |
- if 'value' in command and re.match(r'\d+', command['value']): | |
- accuracy = int(re.search(r'\d+', str(command['value'])).group()) | |
- parts = command['target'].split(',') | |
+ if "value" in command and re.match(r"\d+", command["value"]): | |
+ accuracy = int(re.search(r"\d+", str(command["value"])).group()) | |
+ parts = command["target"].split(",") | |
lat = float(parts[0]) | |
lng = float(parts[1]) | |
self.devtools.send_command( | |
- 'Emulation.setGeolocationOverride', | |
- {'latitude': lat, 'longitude': lng, | |
- 'accuracy': accuracy}) | |
+ "Emulation.setGeolocationOverride", | |
+ {"latitude": lat, "longitude": lng, "accuracy": accuracy}, | |
+ ) | |
except Exception: | |
pass | |
- elif command['command'] == 'addheader': | |
- self.devtools.set_header(command['target']) | |
- elif command['command'] == 'setheader': | |
- self.devtools.set_header(command['target']) | |
- elif command['command'] == 'resetheaders': | |
+ elif command["command"] == "addheader": | |
+ self.devtools.set_header(command["target"]) | |
+ elif command["command"] == "setheader": | |
+ self.devtools.set_header(command["target"]) | |
+ elif command["command"] == "resetheaders": | |
self.devtools.reset_headers() | |
- elif command['command'] == 'clearcache': | |
+ elif command["command"] == "clearcache": | |
self.devtools.clear_cache() | |
def navigate(self, url): | |
"""Navigate to the given URL""" | |
if self.devtools is not None: | |
- self.devtools.send_command('Page.navigate', {'url': url}, wait=True) | |
+ self.devtools.send_command("Page.navigate", {"url": url}, wait=True) | |
def get_requests(self): | |
"""Get the request details for running an optimization check""" | |
requests = None | |
if self.devtools is not None: | |
@@ -441,181 +496,222 @@ | |
return requests | |
def lighthouse_thread(self): | |
"""Run lighthouse in a thread so we can kill it if it times out""" | |
cmd = self.lighthouse_command | |
- self.task['lighthouse_log'] = cmd + "\n" | |
+ self.task["lighthouse_log"] = cmd + "\n" | |
logging.debug(cmd) | |
proc = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) | |
- for line in iter(proc.stderr.readline, b''): | |
+ for line in iter(proc.stderr.readline, b""): | |
try: | |
logging.debug(line.rstrip()) | |
- self.task['lighthouse_log'] += line | |
+ self.task["lighthouse_log"] += line | |
except Exception: | |
pass | |
proc.communicate() | |
def run_lighthouse_test(self, task): | |
"""Run a lighthouse test against the current browser session""" | |
- task['lighthouse_log'] = '' | |
- if 'url' in self.job and self.job['url'] is not None: | |
- self.job['shaper'].configure(self.job, task) | |
- output_path = os.path.join(task['dir'], 'lighthouse.json') | |
- json_file = os.path.join(task['dir'], 'lighthouse.report.json') | |
- json_gzip = os.path.join(task['dir'], 'lighthouse.json.gz') | |
- html_file = os.path.join(task['dir'], 'lighthouse.report.html') | |
- html_gzip = os.path.join(task['dir'], 'lighthouse.html.gz') | |
- time_limit = min(int(task['time_limit']), 80) | |
- command = ['lighthouse', | |
- '"{0}"'.format(self.job['url']), | |
- '--disable-network-throttling', | |
- '--disable-cpu-throttling', | |
- '--throttling-method', 'provided', | |
- '--enable-error-reporting', | |
- '--max-wait-for-load', str(int(time_limit * 1000)), | |
- '--port', str(task['port']), | |
- '--output', 'html', | |
- '--output', 'json', | |
- '--output-path', '"{0}"'.format(output_path)] | |
- if self.job['keep_lighthouse_trace']: | |
- command.append('--save-assets') | |
- if self.options.android or 'mobile' not in self.job or not self.job['mobile']: | |
- command.append('--disable-device-emulation') | |
- if 'user_agent_string' in self.job: | |
- sanitized_user_agent = re.sub(r'[^a-zA-Z0-9_\-.;:/()\[\] ]+', '', self.job['user_agent_string']) | |
- command.append('--chrome-flags="--user-agent=\'{0}\'"'.format(sanitized_user_agent)) | |
- if len(task['block']): | |
- for pattern in task['block']: | |
+ task["lighthouse_log"] = "" | |
+ if "url" in self.job and self.job["url"] is not None: | |
+ self.job["shaper"].configure(self.job, task) | |
+ output_path = os.path.join(task["dir"], "lighthouse.json") | |
+ json_file = os.path.join(task["dir"], "lighthouse.report.json") | |
+ json_gzip = os.path.join(task["dir"], "lighthouse.json.gz") | |
+ html_file = os.path.join(task["dir"], "lighthouse.report.html") | |
+ html_gzip = os.path.join(task["dir"], "lighthouse.html.gz") | |
+ time_limit = min(int(task["time_limit"]), 80) | |
+ command = [ | |
+ "lighthouse", | |
+ '"{0}"'.format(self.job["url"]), | |
+ "--disable-network-throttling", | |
+ "--disable-cpu-throttling", | |
+ "--throttling-method", | |
+ "provided", | |
+ "--enable-error-reporting", | |
+ "--max-wait-for-load", | |
+ str(int(time_limit * 1000)), | |
+ "--port", | |
+ str(task["port"]), | |
+ "--output", | |
+ "html", | |
+ "--output", | |
+ "json", | |
+ "--output-path", | |
+ '"{0}"'.format(output_path), | |
+ ] | |
+ if self.job["keep_lighthouse_trace"]: | |
+ command.append("--save-assets") | |
+ if ( | |
+ self.options.android | |
+ or "mobile" not in self.job | |
+ or not self.job["mobile"] | |
+ ): | |
+ command.append("--disable-device-emulation") | |
+ if "user_agent_string" in self.job: | |
+ sanitized_user_agent = re.sub( | |
+ r"[^a-zA-Z0-9_\-.;:/()\[\] ]+", | |
+ "", | |
+ self.job["user_agent_string"], | |
+ ) | |
+ command.append( | |
+ "--chrome-flags=\"--user-agent='{0}'\"".format( | |
+ sanitized_user_agent | |
+ ) | |
+ ) | |
+ if len(task["block"]): | |
+ for pattern in task["block"]: | |
pattern = "'" + pattern.replace("'", "'\\''") + "'" | |
- command.extend(['--blocked-url-patterns', pattern]) | |
- if 'headers' in task: | |
- headers_file = os.path.join(task['dir'], 'lighthouse-headers.json') | |
- with open(headers_file, 'wb') as f_out: | |
- json.dump(task['headers'], f_out) | |
- command.extend(['--extra-headers', '"{0}"'.format(headers_file)]) | |
- cmd = ' '.join(command) | |
+ command.extend(["--blocked-url-patterns", pattern]) | |
+ if "headers" in task: | |
+ headers_file = os.path.join(task["dir"], "lighthouse-headers.json") | |
+ with open(headers_file, "wb") as f_out: | |
+ json.dump(task["headers"], f_out) | |
+ command.extend(["--extra-headers", '"{0}"'.format(headers_file)]) | |
+ cmd = " ".join(command) | |
self.lighthouse_command = cmd | |
# Give lighthouse up to 10 minutes to run all of the audits | |
try: | |
lh_thread = threading.Thread(target=self.lighthouse_thread) | |
lh_thread.start() | |
lh_thread.join(600) | |
except Exception: | |
pass | |
from .os_util import kill_all | |
- kill_all('node', True) | |
- self.job['shaper'].reset() | |
+ | |
+ kill_all("node", True) | |
+ self.job["shaper"].reset() | |
# Rename and compress the trace file, delete the other assets | |
- if self.job['keep_lighthouse_trace']: | |
+ if self.job["keep_lighthouse_trace"]: | |
try: | |
- lh_trace_src = os.path.join(task['dir'], 'lighthouse-0.trace.json') | |
+ lh_trace_src = os.path.join(task["dir"], "lighthouse-0.trace.json") | |
if os.path.isfile(lh_trace_src): | |
# read the JSON in and re-write it line by line to match the other traces | |
- with open(lh_trace_src, 'rb') as f_in: | |
+ with open(lh_trace_src, "rb") as f_in: | |
trace = json.load(f_in) | |
- if trace is not None and 'traceEvents' in trace: | |
- lighthouse_trace = os.path.join(task['dir'], | |
- 'lighthouse_trace.json.gz') | |
- with gzip.open(lighthouse_trace, 'wb', 7) as f_out: | |
+ if trace is not None and "traceEvents" in trace: | |
+ lighthouse_trace = os.path.join( | |
+ task["dir"], "lighthouse_trace.json.gz" | |
+ ) | |
+ with gzip.open(lighthouse_trace, "wb", 7) as f_out: | |
f_out.write('{"traceEvents":[{}') | |
- for trace_event in trace['traceEvents']: | |
+ for trace_event in trace["traceEvents"]: | |
f_out.write(",\n") | |
f_out.write(json.dumps(trace_event)) | |
f_out.write("\n]}") | |
except Exception: | |
pass | |
# Delete all the left-over lighthouse assets | |
- files = glob.glob(os.path.join(task['dir'], 'lighthouse-*')) | |
+ files = glob.glob(os.path.join(task["dir"], "lighthouse-*")) | |
for file_path in files: | |
try: | |
os.remove(file_path) | |
except Exception: | |
pass | |
if os.path.isfile(json_file): | |
# Remove the raw screenshots if they were stored with the file | |
lh_report = None | |
- with open(json_file, 'rb') as f_in: | |
+ with open(json_file, "rb") as f_in: | |
lh_report = json.load(f_in) | |
modified = False | |
- if lh_report is not None and 'audits' in lh_report: | |
- if 'screenshots' in lh_report['audits']: | |
- del lh_report['audits']['screenshots'] | |
+ if lh_report is not None and "audits" in lh_report: | |
+ if "screenshots" in lh_report["audits"]: | |
+ del lh_report["audits"]["screenshots"] | |
modified = True | |
- if 'screenshot-thumbnails' in lh_report['audits']: | |
- del lh_report['audits']['screenshot-thumbnails'] | |
+ if "screenshot-thumbnails" in lh_report["audits"]: | |
+ del lh_report["audits"]["screenshot-thumbnails"] | |
modified = True | |
if modified: | |
- with gzip.open(json_gzip, 'wb', 7) as f_out: | |
+ with gzip.open(json_gzip, "wb", 7) as f_out: | |
json.dump(lh_report, f_out) | |
else: | |
- with open(json_file, 'rb') as f_in: | |
- with gzip.open(json_gzip, 'wb', 7) as f_out: | |
+ with open(json_file, "rb") as f_in: | |
+ with gzip.open(json_gzip, "wb", 7) as f_out: | |
shutil.copyfileobj(f_in, f_out) | |
try: | |
os.remove(json_file) | |
except Exception: | |
pass | |
# Extract the audit scores | |
if lh_report is not None: | |
audits = {} | |
# v1.x | |
- if 'aggregations' in lh_report: | |
- for entry in lh_report['aggregations']: | |
- if 'name' in entry and 'total' in entry and \ | |
- 'scored' in entry and entry['scored']: | |
- name = entry['name'].replace(' ', '') | |
- audits[name] = entry['total'] | |
+ if "aggregations" in lh_report: | |
+ for entry in lh_report["aggregations"]: | |
+ if ( | |
+ "name" in entry | |
+ and "total" in entry | |
+ and "scored" in entry | |
+ and entry["scored"] | |
+ ): | |
+ name = entry["name"].replace(" ", "") | |
+ audits[name] = entry["total"] | |
# v2.x | |
- elif 'reportCategories' in lh_report: | |
- for category in lh_report['reportCategories']: | |
- if 'name' in category and 'score' in category: | |
- category_name = category['name'].replace(' ', '') | |
- score = float(category['score']) / 100.0 | |
+ elif "reportCategories" in lh_report: | |
+ for category in lh_report["reportCategories"]: | |
+ if "name" in category and "score" in category: | |
+ category_name = category["name"].replace(" ", "") | |
+ score = float(category["score"]) / 100.0 | |
audits[category_name] = score | |
- if category['name'] == 'Performance' and 'audits' in category: | |
- for audit in category['audits']: | |
- if 'id' in audit and 'group' in audit and \ | |
- audit['group'] == 'perf-metric' and \ | |
- 'result' in audit and \ | |
- 'rawValue' in audit['result']: | |
- name = category_name + '.' + \ | |
- audit['id'].replace(' ', '') | |
- audits[name] = audit['result']['rawValue'] | |
+ if ( | |
+ category["name"] == "Performance" | |
+ and "audits" in category | |
+ ): | |
+ for audit in category["audits"]: | |
+ if ( | |
+ "id" in audit | |
+ and "group" in audit | |
+ and audit["group"] == "perf-metric" | |
+ and "result" in audit | |
+ and "rawValue" in audit["result"] | |
+ ): | |
+ name = ( | |
+ category_name | |
+ + "." | |
+ + audit["id"].replace(" ", "") | |
+ ) | |
+ audits[name] = audit["result"]["rawValue"] | |
# v3.x | |
- elif 'categories' in lh_report: | |
- for categoryId in lh_report['categories']: | |
- category = lh_report['categories'][categoryId] | |
- if 'title' not in category or 'score' not in category: | |
+ elif "categories" in lh_report: | |
+ for categoryId in lh_report["categories"]: | |
+ category = lh_report["categories"][categoryId] | |
+ if "title" not in category or "score" not in category: | |
continue | |
- category_title = category['title'].replace(' ', '') | |
- audits[category_title] = category['score'] | |
- | |
- if categoryId != 'performance' or 'auditRefs' not in category: | |
+ category_title = category["title"].replace(" ", "") | |
+ audits[category_title] = category["score"] | |
+ | |
+ if ( | |
+ categoryId != "performance" | |
+ or "auditRefs" not in category | |
+ ): | |
continue | |
- for auditRef in category['auditRefs']: | |
- if auditRef['id'] not in lh_report['audits']: | |
+ for auditRef in category["auditRefs"]: | |
+ if auditRef["id"] not in lh_report["audits"]: | |
continue | |
- if 'group' not in auditRef or auditRef['group'] != 'metrics': | |
+ if ( | |
+ "group" not in auditRef | |
+ or auditRef["group"] != "metrics" | |
+ ): | |
continue | |
- audit = lh_report['audits'][auditRef['id']] | |
- name = category_title + '.' + audit['id'] | |
- audits[name] = audit['rawValue'] | |
- audits_gzip = os.path.join(task['dir'], 'lighthouse_audits.json.gz') | |
- with gzip.open(audits_gzip, 'wb', 7) as f_out: | |
+ audit = lh_report["audits"][auditRef["id"]] | |
+ name = category_title + "." + audit["id"] | |
+ audits[name] = audit["rawValue"] | |
+ audits_gzip = os.path.join(task["dir"], "lighthouse_audits.json.gz") | |
+ with gzip.open(audits_gzip, "wb", 7) as f_out: | |
json.dump(audits, f_out) | |
if os.path.isfile(html_file): | |
# Remove the raw screenshots if they were stored with the file | |
- with open(html_file, 'rb') as f_in: | |
+ with open(html_file, "rb") as f_in: | |
lh_report = f_in.read() | |
- start = lh_report.find('\n "screenshots') | |
+ start = lh_report.find("\n "screenshots") | |
if start >= 0: | |
- end = lh_report.find('\n },', start) | |
+ end = lh_report.find("\n },", start) | |
if end >= 0: | |
- lh_report = lh_report[:start] + lh_report[end + 7:] | |
- with gzip.open(html_gzip, 'wb', 7) as f_out: | |
+ lh_report = lh_report[:start] + lh_report[end + 7 :] | |
+ with gzip.open(html_gzip, "wb", 7) as f_out: | |
f_out.write(lh_report) | |
try: | |
os.remove(html_file) | |
except Exception: | |
pass | |
@@ -624,48 +720,62 @@ | |
"""Run the wappalyzer detection""" | |
# Run the Wappalyzer detection (give it 30 seconds at most) | |
completed = False | |
if self.devtools is not None: | |
try: | |
- logging.debug('wappalyzer_detect') | |
+ logging.debug("wappalyzer_detect") | |
detect_script = self.wappalyzer_script(request_headers) | |
- response = self.devtools.send_command("Runtime.evaluate", | |
- {'expression': detect_script, | |
- 'awaitPromise': True, | |
- 'returnByValue': True, | |
- 'timeout': 30000}, | |
- wait=True, timeout=30) | |
- if response is not None and 'result' in response and\ | |
- 'result' in response['result'] and\ | |
- 'value' in response['result']['result']: | |
- result = response['result']['result']['value'] | |
+ response = self.devtools.send_command( | |
+ "Runtime.evaluate", | |
+ { | |
+ "expression": detect_script, | |
+ "awaitPromise": True, | |
+ "returnByValue": True, | |
+ "timeout": 30000, | |
+ }, | |
+ wait=True, | |
+ timeout=30, | |
+ ) | |
+ if ( | |
+ response is not None | |
+ and "result" in response | |
+ and "result" in response["result"] | |
+ and "value" in response["result"]["result"] | |
+ ): | |
+ result = response["result"]["result"]["value"] | |
if result: | |
completed = True | |
logging.debug(result) | |
detected = json.loads(result) | |
- if 'categories' in detected: | |
- task['page_data']['detected'] = dict(detected['categories']) | |
- if 'apps' in detected: | |
- task['page_data']['detected_apps'] = dict(detected['apps']) | |
+ if "categories" in detected: | |
+ task["page_data"]["detected"] = dict(detected["categories"]) | |
+ if "apps" in detected: | |
+ task["page_data"]["detected_apps"] = dict(detected["apps"]) | |
except Exception as err: | |
logging.exception("Exception running Wappalyzer: %s", err.__str__()) | |
if not completed: | |
- task['page_data']['wappalyzer_failed'] = 1 | |
+ task["page_data"]["wappalyzer_failed"] = 1 | |
def wappalyzer_script(self, response_headers): | |
"""Build the wappalyzer script to run in-browser""" | |
script = None | |
try: | |
- with open(os.path.join(self.support_path, 'Wappalyzer', 'script.js')) as f_in: | |
+ with open( | |
+ os.path.join(self.support_path, "Wappalyzer", "script.js") | |
+ ) as f_in: | |
script = f_in.read() | |
if script is not None: | |
wappalyzer = None | |
- with open(os.path.join(self.support_path, 'Wappalyzer', 'wappalyzer.js')) as f_in: | |
+ with open( | |
+ os.path.join(self.support_path, "Wappalyzer", "wappalyzer.js") | |
+ ) as f_in: | |
wappalyzer = f_in.read() | |
if wappalyzer is not None: | |
json_data = None | |
- with open(os.path.join(self.support_path, 'Wappalyzer', 'apps.json')) as f_in: | |
+ with open( | |
+ os.path.join(self.support_path, "Wappalyzer", "apps.json") | |
+ ) as f_in: | |
json_data = f_in.read() | |
if json is not None: | |
# Format the headers as a dictionary of lists | |
headers = {} | |
if response_headers is not None: | |
@@ -674,26 +784,28 @@ | |
values = [] | |
entry = response_headers[key] | |
if isinstance(entry, list): | |
values = entry | |
elif isinstance(entry, (str, unicode)): | |
- entries = entry.split('\n') | |
+ entries = entry.split("\n") | |
for value in entries: | |
values.append(value.strip()) | |
if values: | |
headers[key.lower()] = values | |
elif isinstance(response_headers, list): | |
for pair in response_headers: | |
if isinstance(pair, (str, unicode)): | |
- parts = pair.split(':', 1) | |
- key = parts[0].strip(' :\n\t').lower() | |
- value = parts[1].strip(' :\n\t') | |
+ parts = pair.split(":", 1) | |
+ key = parts[0].strip(" :\n\t").lower() | |
+ value = parts[1].strip(" :\n\t") | |
if key not in headers: | |
headers[key] = [] | |
headers[key].append(value) | |
- script = script.replace('%WAPPALYZER%', wappalyzer) | |
- script = script.replace('%JSON%', json_data) | |
- script = script.replace('%RESPONSE_HEADERS%', json.dumps(headers)) | |
+ script = script.replace("%WAPPALYZER%", wappalyzer) | |
+ script = script.replace("%JSON%", json_data) | |
+ script = script.replace( | |
+ "%RESPONSE_HEADERS%", json.dumps(headers) | |
+ ) | |
except Exception: | |
pass | |
return script | |
--- internal/optimization_checks.py 2019-02-06 16:55:03.353561 +0000 | |
+++ internal/optimization_checks.py 2019-02-06 17:08:28.237755 +0000 | |
@@ -17,10 +17,11 @@ | |
import ujson as json | |
class OptimizationChecks(object): | |
"""Threaded optimization checks""" | |
+ | |
def __init__(self, job, task, requests): | |
self.job = job | |
self.task = task | |
self.running_checks = False | |
self.requests = requests | |
@@ -44,221 +45,190 @@ | |
self.dns_result_queue = Queue.Queue() | |
self.fetch_queue = Queue.Queue() | |
self.fetch_result_queue = Queue.Queue() | |
# spell-checker: disable | |
self.cdn_cnames = { | |
- 'Advanced Hosters CDN': ['.pix-cdn.org'], | |
- 'afxcdn.net': ['.afxcdn.net'], | |
- 'Akamai': ['.akamai.net', | |
- '.akamaized.net', | |
- '.akamaiedge.net', | |
- '.akamaihd.net', | |
- '.edgesuite.net', | |
- '.edgekey.net', | |
- '.srip.net', | |
- '.akamaitechnologies.com', | |
- '.akamaitechnologies.fr'], | |
- 'Akamai China CDN': ['.tl88.net'], | |
- 'Alimama': ['.gslb.tbcache.com'], | |
- 'Amazon CloudFront': ['.cloudfront.net'], | |
- 'Aryaka': ['.aads1.net', | |
- '.aads-cn.net', | |
- '.aads-cng.net'], | |
- 'AT&T': ['.att-dsa.net'], | |
- 'Azion': ['.azioncdn.net', | |
- '.azioncdn.com', | |
- '.azion.net'], | |
- 'BelugaCDN': ['.belugacdn.com', | |
- '.belugacdn.link'], | |
- 'Bison Grid': ['.bisongrid.net'], | |
- 'BitGravity': ['.bitgravity.com'], | |
- 'Blue Hat Network': ['.bluehatnetwork.com'], | |
- 'BO.LT': ['bo.lt'], | |
- 'BunnyCDN': ['.b-cdn.net'], | |
- 'Cachefly': ['.cachefly.net'], | |
- 'Caspowa': ['.caspowa.com'], | |
- 'Cedexis': ['.cedexis.net'], | |
- 'CDN77': ['.cdn77.net', | |
- '.cdn77.org'], | |
- 'CDNetworks': ['.cdngc.net', | |
- '.gccdn.net', | |
- '.panthercdn.com'], | |
- 'CDNsun': ['.cdnsun.net'], | |
- 'CDNvideo': ['.cdnvideo.ru', | |
- '.cdnvideo.net'], | |
- 'ChinaCache': ['.ccgslb.com'], | |
- 'ChinaNetCenter': ['.lxdns.com', | |
- '.wscdns.com', | |
- '.wscloudcdn.com', | |
- '.ourwebpic.com'], | |
- 'Cloudflare': ['.cloudflare.com', | |
- '.cloudflare.net'], | |
- 'Cotendo CDN': ['.cotcdn.net'], | |
- 'cubeCDN': ['.cubecdn.net'], | |
- 'Edgecast': ['edgecastcdn.net', | |
- '.systemcdn.net', | |
- '.transactcdn.net', | |
- '.v1cdn.net', | |
- '.v2cdn.net', | |
- '.v3cdn.net', | |
- '.v4cdn.net', | |
- '.v5cdn.net'], | |
- 'Facebook': ['.facebook.com', | |
- '.facebook.net', | |
- '.fbcdn.net', | |
- '.cdninstagram.com'], | |
- 'Fastly': ['.fastly.net', | |
- '.fastlylb.net', | |
- '.nocookie.net'], | |
- 'GoCache': ['.cdn.gocache.net'], | |
- 'Google': ['.google.', | |
- 'googlesyndication.', | |
- 'youtube.', | |
- '.googleusercontent.com', | |
- 'googlehosted.com', | |
- '.gstatic.com', | |
- '.doubleclick.net'], | |
- 'HiberniaCDN': ['.hiberniacdn.com'], | |
- 'Highwinds': ['hwcdn.net'], | |
- 'Hosting4CDN': ['.hosting4cdn.com'], | |
- 'ImageEngine': ['.imgeng.in'], | |
- 'Incapsula': ['.incapdns.net'], | |
- 'Instart Logic': ['.insnw.net', | |
- '.inscname.net'], | |
- 'Internap': ['.internapcdn.net'], | |
- 'jsDelivr': ['cdn.jsdelivr.net'], | |
- 'KeyCDN': ['.kxcdn.com'], | |
- 'KINX CDN': ['.kinxcdn.com', | |
- '.kinxcdn.net'], | |
- 'LeaseWeb CDN': ['.lswcdn.net', | |
- '.lswcdn.eu'], | |
- 'Level 3': ['.footprint.net', | |
- '.fpbns.net'], | |
- 'Limelight': ['.llnwd.net', | |
- '.llnwi.net', | |
- '.lldns.net'], | |
- 'MediaCloud': ['.cdncloud.net.au'], | |
- 'Medianova': ['.mncdn.com', | |
- '.mncdn.net', | |
- '.mncdn.org'], | |
- 'Microsoft Azure': ['.vo.msecnd.net', | |
- '.azureedge.net', | |
- '.azure.microsoft.com'], | |
- 'Mirror Image': ['.instacontent.net', | |
- '.mirror-image.net'], | |
- 'NetDNA': ['.netdna-cdn.com', | |
- '.netdna-ssl.com', | |
- '.netdna.com'], | |
- 'Netlify': ['.netlify.com'], | |
- 'NGENIX': ['.ngenix.net'], | |
- 'NYI FTW': ['.nyiftw.net', | |
- '.nyiftw.com'], | |
- 'OnApp': ['.r.worldcdn.net', | |
- '.r.worldssl.net'], | |
- 'Optimal CDN': ['.optimalcdn.com'], | |
- 'PageRain': ['.pagerain.net'], | |
- 'PUSHR': ['.pushrcdn.com'], | |
- 'Rackspace': ['.raxcdn.com'], | |
- 'Reapleaf': ['.rlcdn.com'], | |
- 'Reflected Networks': ['.rncdn1.com', | |
- '.rncdn7.com'], | |
- 'ReSRC.it': ['.resrc.it'], | |
- 'Rev Software': ['.revcn.net', | |
- '.revdn.net'], | |
- 'Roast.io': ['.roast.io'], | |
- 'Rocket CDN': ['.streamprovider.net'], | |
- 'section.io': ['.squixa.net'], | |
- 'SFR': ['cdn.sfr.net'], | |
- 'Simple CDN': ['.simplecdn.net'], | |
- 'Singular CDN': ['.singularcdn.net.br'], | |
- 'StackPath': ['.stackpathdns.com'], | |
- 'SwiftCDN': ['.swiftcdn1.com', | |
- '.swiftserve.com'], | |
- 'Taobao': ['.gslb.taobao.com', | |
- 'tbcdn.cn', | |
- '.taobaocdn.com'], | |
- 'Telenor': ['.cdntel.net'], | |
- 'TRBCDN': ['.trbcdn.net'], | |
- 'Twitter': ['.twimg.com'], | |
- 'UnicornCDN': ['.unicorncdn.net'], | |
- 'VegaCDN': ['.vegacdn.vn', | |
- '.vegacdn.com'], | |
- 'VoxCDN': ['.voxcdn.net'], | |
- 'WordPress': ['.wp.com', | |
- '.wordpress.com', | |
- '.gravatar.com'], | |
- 'XLabs Security': ['.xlabs.com.br', | |
- '.armor.zone'], | |
- 'Yahoo': ['.ay1.b.yahoo.com', | |
- '.yimg.', | |
- '.yahooapis.com'], | |
- 'Yottaa': ['.yottaa.net'], | |
- 'Zenedge': ['.zenedge.net'] | |
+ "Advanced Hosters CDN": [".pix-cdn.org"], | |
+ "afxcdn.net": [".afxcdn.net"], | |
+ "Akamai": [ | |
+ ".akamai.net", | |
+ ".akamaized.net", | |
+ ".akamaiedge.net", | |
+ ".akamaihd.net", | |
+ ".edgesuite.net", | |
+ ".edgekey.net", | |
+ ".srip.net", | |
+ ".akamaitechnologies.com", | |
+ ".akamaitechnologies.fr", | |
+ ], | |
+ "Akamai China CDN": [".tl88.net"], | |
+ "Alimama": [".gslb.tbcache.com"], | |
+ "Amazon CloudFront": [".cloudfront.net"], | |
+ "Aryaka": [".aads1.net", ".aads-cn.net", ".aads-cng.net"], | |
+ "AT&T": [".att-dsa.net"], | |
+ "Azion": [".azioncdn.net", ".azioncdn.com", ".azion.net"], | |
+ "BelugaCDN": [".belugacdn.com", ".belugacdn.link"], | |
+ "Bison Grid": [".bisongrid.net"], | |
+ "BitGravity": [".bitgravity.com"], | |
+ "Blue Hat Network": [".bluehatnetwork.com"], | |
+ "BO.LT": ["bo.lt"], | |
+ "BunnyCDN": [".b-cdn.net"], | |
+ "Cachefly": [".cachefly.net"], | |
+ "Caspowa": [".caspowa.com"], | |
+ "Cedexis": [".cedexis.net"], | |
+ "CDN77": [".cdn77.net", ".cdn77.org"], | |
+ "CDNetworks": [".cdngc.net", ".gccdn.net", ".panthercdn.com"], | |
+ "CDNsun": [".cdnsun.net"], | |
+ "CDNvideo": [".cdnvideo.ru", ".cdnvideo.net"], | |
+ "ChinaCache": [".ccgslb.com"], | |
+ "ChinaNetCenter": [ | |
+ ".lxdns.com", | |
+ ".wscdns.com", | |
+ ".wscloudcdn.com", | |
+ ".ourwebpic.com", | |
+ ], | |
+ "Cloudflare": [".cloudflare.com", ".cloudflare.net"], | |
+ "Cotendo CDN": [".cotcdn.net"], | |
+ "cubeCDN": [".cubecdn.net"], | |
+ "Edgecast": [ | |
+ "edgecastcdn.net", | |
+ ".systemcdn.net", | |
+ ".transactcdn.net", | |
+ ".v1cdn.net", | |
+ ".v2cdn.net", | |
+ ".v3cdn.net", | |
+ ".v4cdn.net", | |
+ ".v5cdn.net", | |
+ ], | |
+ "Facebook": [ | |
+ ".facebook.com", | |
+ ".facebook.net", | |
+ ".fbcdn.net", | |
+ ".cdninstagram.com", | |
+ ], | |
+ "Fastly": [".fastly.net", ".fastlylb.net", ".nocookie.net"], | |
+ "GoCache": [".cdn.gocache.net"], | |
+ "Google": [ | |
+ ".google.", | |
+ "googlesyndication.", | |
+ "youtube.", | |
+ ".googleusercontent.com", | |
+ "googlehosted.com", | |
+ ".gstatic.com", | |
+ ".doubleclick.net", | |
+ ], | |
+ "HiberniaCDN": [".hiberniacdn.com"], | |
+ "Highwinds": ["hwcdn.net"], | |
+ "Hosting4CDN": [".hosting4cdn.com"], | |
+ "ImageEngine": [".imgeng.in"], | |
+ "Incapsula": [".incapdns.net"], | |
+ "Instart Logic": [".insnw.net", ".inscname.net"], | |
+ "Internap": [".internapcdn.net"], | |
+ "jsDelivr": ["cdn.jsdelivr.net"], | |
+ "KeyCDN": [".kxcdn.com"], | |
+ "KINX CDN": [".kinxcdn.com", ".kinxcdn.net"], | |
+ "LeaseWeb CDN": [".lswcdn.net", ".lswcdn.eu"], | |
+ "Level 3": [".footprint.net", ".fpbns.net"], | |
+ "Limelight": [".llnwd.net", ".llnwi.net", ".lldns.net"], | |
+ "MediaCloud": [".cdncloud.net.au"], | |
+ "Medianova": [".mncdn.com", ".mncdn.net", ".mncdn.org"], | |
+ "Microsoft Azure": [ | |
+ ".vo.msecnd.net", | |
+ ".azureedge.net", | |
+ ".azure.microsoft.com", | |
+ ], | |
+ "Mirror Image": [".instacontent.net", ".mirror-image.net"], | |
+ "NetDNA": [".netdna-cdn.com", ".netdna-ssl.com", ".netdna.com"], | |
+ "Netlify": [".netlify.com"], | |
+ "NGENIX": [".ngenix.net"], | |
+ "NYI FTW": [".nyiftw.net", ".nyiftw.com"], | |
+ "OnApp": [".r.worldcdn.net", ".r.worldssl.net"], | |
+ "Optimal CDN": [".optimalcdn.com"], | |
+ "PageRain": [".pagerain.net"], | |
+ "PUSHR": [".pushrcdn.com"], | |
+ "Rackspace": [".raxcdn.com"], | |
+ "Reapleaf": [".rlcdn.com"], | |
+ "Reflected Networks": [".rncdn1.com", ".rncdn7.com"], | |
+ "ReSRC.it": [".resrc.it"], | |
+ "Rev Software": [".revcn.net", ".revdn.net"], | |
+ "Roast.io": [".roast.io"], | |
+ "Rocket CDN": [".streamprovider.net"], | |
+ "section.io": [".squixa.net"], | |
+ "SFR": ["cdn.sfr.net"], | |
+ "Simple CDN": [".simplecdn.net"], | |
+ "Singular CDN": [".singularcdn.net.br"], | |
+ "StackPath": [".stackpathdns.com"], | |
+ "SwiftCDN": [".swiftcdn1.com", ".swiftserve.com"], | |
+ "Taobao": [".gslb.taobao.com", "tbcdn.cn", ".taobaocdn.com"], | |
+ "Telenor": [".cdntel.net"], | |
+ "TRBCDN": [".trbcdn.net"], | |
+ "Twitter": [".twimg.com"], | |
+ "UnicornCDN": [".unicorncdn.net"], | |
+ "VegaCDN": [".vegacdn.vn", ".vegacdn.com"], | |
+ "VoxCDN": [".voxcdn.net"], | |
+ "WordPress": [".wp.com", ".wordpress.com", ".gravatar.com"], | |
+ "XLabs Security": [".xlabs.com.br", ".armor.zone"], | |
+ "Yahoo": [".ay1.b.yahoo.com", ".yimg.", ".yahooapis.com"], | |
+ "Yottaa": [".yottaa.net"], | |
+ "Zenedge": [".zenedge.net"], | |
} | |
self.cdn_headers = { | |
- 'Airee': [{'Server': 'Airee'}], | |
- 'Amazon CloudFront': [{'Via': 'CloudFront'}], | |
- 'Aryaka': [{'X-Ar-Debug': ''}], | |
- 'BelugaCDN': [{'Server': 'Beluga'}, | |
- {'X-Beluga-Cache-Status': ''}], | |
- 'BunnyCDN': [{'Server': 'BunnyCDN'}], | |
- 'Caspowa': [{'Server': 'Caspowa'}], | |
- 'CDN': [{'X-Edge-IP': ''}, | |
- {'X-Edge-Location': ''}], | |
- 'CDNetworks': [{'X-Px': ''}], | |
- 'ChinaNetCenter': [{'X-Cache': 'cache.51cdn.com'}], | |
- 'Cloudflare': [{'Server': 'cloudflare'}], | |
- 'Edgecast': [{'Server': 'ECS'}, | |
- {'Server': 'ECAcc'}, | |
- {'Server': 'ECD'}], | |
- 'Fastly': [{'X-Served-By': 'cache-', 'X-Cache': ''}], | |
- 'Fly': [{'Server': 'Fly.io'}], | |
- 'GoCache': [{'Server': 'gocache'}], | |
- 'Google': [{'Server': 'sffe'}, | |
- {'Server': 'gws'}, | |
- {'Server': 'GSE'}, | |
- {'Server': 'Golfe2'}, | |
- {'Via': 'google'}], | |
- 'HiberniaCDN': [{'Server': 'hiberniacdn'}], | |
- 'Highwinds': [{'X-HW': ''}], | |
- 'ImageEngine': [{'Server': 'ScientiaMobile ImageEngine'}], | |
- 'Incapsula': [{'X-CDN': 'Incapsula'}, | |
- {'X-Iinfo': ''}], | |
- 'Instart Logic': [{'X-Instart-Request-ID': 'instart'}], | |
- 'LeaseWeb CDN': [{'Server': 'leasewebcdn'}], | |
- 'Medianova': [{'Server': 'MNCDN'}], | |
- 'Naver': [{'Server': 'Testa/'}], | |
- 'NetDNA': [{'Server': 'NetDNA'}], | |
- 'Netlify': [{'Server': 'Netlify'}], | |
- 'NYI FTW': [{'X-Powered-By': 'NYI FTW'}, | |
- {'X-Delivered-By': 'NYI FTW'}], | |
- 'Optimal CDN': [{'Server': 'Optimal CDN'}], | |
- 'OVH CDN': [{'X-CDN-Geo': ''}, | |
- {'X-CDN-Pop': ''}], | |
- 'PUSHR': [{'Via': 'PUSHR'}], | |
- 'ReSRC.it': [{'Server': 'ReSRC'}], | |
- 'Rev Software': [{'Via': 'Rev-Cache'}, | |
- {'X-Rev-Cache': ''}], | |
- 'Roast.io': [{'Server': 'Roast.io'}], | |
- 'Rocket CDN': [{'x-rocket-node': ''}], | |
- 'section.io': [{'section-io-id': ''}], | |
- 'Singular CDN': [{'Server': 'SingularCDN'}], | |
- 'Sucuri Firewall': [{'Server': 'Sucuri/Cloudproxy'}, | |
- {'x-sucuri-id': ''}], | |
- 'Surge': [{'Server': 'SurgeCDN'}], | |
- 'Twitter': [{'Server': 'tsa_b'}], | |
- 'UnicornCDN': [{'Server': 'UnicornCDN'}], | |
- 'XLabs Security': [{'x-cdn': 'XLabs Security'}], | |
- 'Yunjiasu': [{'Server': 'yunjiasu'}], | |
- 'Zenedge': [{'X-Cdn': 'Zenedge'}] | |
+ "Airee": [{"Server": "Airee"}], | |
+ "Amazon CloudFront": [{"Via": "CloudFront"}], | |
+ "Aryaka": [{"X-Ar-Debug": ""}], | |
+ "BelugaCDN": [{"Server": "Beluga"}, {"X-Beluga-Cache-Status": ""}], | |
+ "BunnyCDN": [{"Server": "BunnyCDN"}], | |
+ "Caspowa": [{"Server": "Caspowa"}], | |
+ "CDN": [{"X-Edge-IP": ""}, {"X-Edge-Location": ""}], | |
+ "CDNetworks": [{"X-Px": ""}], | |
+ "ChinaNetCenter": [{"X-Cache": "cache.51cdn.com"}], | |
+ "Cloudflare": [{"Server": "cloudflare"}], | |
+ "Edgecast": [{"Server": "ECS"}, {"Server": "ECAcc"}, {"Server": "ECD"}], | |
+ "Fastly": [{"X-Served-By": "cache-", "X-Cache": ""}], | |
+ "Fly": [{"Server": "Fly.io"}], | |
+ "GoCache": [{"Server": "gocache"}], | |
+ "Google": [ | |
+ {"Server": "sffe"}, | |
+ {"Server": "gws"}, | |
+ {"Server": "GSE"}, | |
+ {"Server": "Golfe2"}, | |
+ {"Via": "google"}, | |
+ ], | |
+ "HiberniaCDN": [{"Server": "hiberniacdn"}], | |
+ "Highwinds": [{"X-HW": ""}], | |
+ "ImageEngine": [{"Server": "ScientiaMobile ImageEngine"}], | |
+ "Incapsula": [{"X-CDN": "Incapsula"}, {"X-Iinfo": ""}], | |
+ "Instart Logic": [{"X-Instart-Request-ID": "instart"}], | |
+ "LeaseWeb CDN": [{"Server": "leasewebcdn"}], | |
+ "Medianova": [{"Server": "MNCDN"}], | |
+ "Naver": [{"Server": "Testa/"}], | |
+ "NetDNA": [{"Server": "NetDNA"}], | |
+ "Netlify": [{"Server": "Netlify"}], | |
+ "NYI FTW": [{"X-Powered-By": "NYI FTW"}, {"X-Delivered-By": "NYI FTW"}], | |
+ "Optimal CDN": [{"Server": "Optimal CDN"}], | |
+ "OVH CDN": [{"X-CDN-Geo": ""}, {"X-CDN-Pop": ""}], | |
+ "PUSHR": [{"Via": "PUSHR"}], | |
+ "ReSRC.it": [{"Server": "ReSRC"}], | |
+ "Rev Software": [{"Via": "Rev-Cache"}, {"X-Rev-Cache": ""}], | |
+ "Roast.io": [{"Server": "Roast.io"}], | |
+ "Rocket CDN": [{"x-rocket-node": ""}], | |
+ "section.io": [{"section-io-id": ""}], | |
+ "Singular CDN": [{"Server": "SingularCDN"}], | |
+ "Sucuri Firewall": [{"Server": "Sucuri/Cloudproxy"}, {"x-sucuri-id": ""}], | |
+ "Surge": [{"Server": "SurgeCDN"}], | |
+ "Twitter": [{"Server": "tsa_b"}], | |
+ "UnicornCDN": [{"Server": "UnicornCDN"}], | |
+ "XLabs Security": [{"x-cdn": "XLabs Security"}], | |
+ "Yunjiasu": [{"Server": "yunjiasu"}], | |
+ "Zenedge": [{"X-Cdn": "Zenedge"}], | |
} | |
# spell-checker: enable | |
def start(self): | |
"""Start running the optimization checks""" | |
- logging.debug('Starting optimization checks...') | |
- optimization_checks_disabled = bool('noopt' in self.job and self.job['noopt']) | |
+ logging.debug("Starting optimization checks...") | |
+ optimization_checks_disabled = bool("noopt" in self.job and self.job["noopt"]) | |
if self.requests is not None and not optimization_checks_disabled: | |
self.running_checks = True | |
# Run the slow checks in background threads | |
self.cdn_thread = threading.Thread(target=self.check_cdn) | |
self.hosting_thread = threading.Thread(target=self.check_hosting) | |
@@ -269,161 +239,195 @@ | |
self.hosting_thread.start() | |
self.gzip_thread.start() | |
self.image_thread.start() | |
self.progressive_thread.start() | |
# collect the miscellaneous results directly | |
- logging.debug('Checking keep-alive.') | |
+ logging.debug("Checking keep-alive.") | |
self.check_keep_alive() | |
- logging.debug('Checking caching.') | |
+ logging.debug("Checking caching.") | |
self.check_cache_static() | |
- logging.debug('Optimization checks started.') | |
+ logging.debug("Optimization checks started.") | |
def join(self): | |
"""Wait for the optimization checks to complete and record the results""" | |
- logging.debug('Waiting for optimization checks to complete') | |
+ logging.debug("Waiting for optimization checks to complete") | |
if self.running_checks: | |
- logging.debug('Waiting for progressive JPEG check to complete') | |
+ logging.debug("Waiting for progressive JPEG check to complete") | |
if self.progressive_thread is not None: | |
self.progressive_thread.join() | |
self.progressive_thread = None | |
if self.progressive_time is not None: | |
- logging.debug("Progressive JPEG check took %0.3f seconds", self.progressive_time) | |
- logging.debug('Waiting for gzip check to complete') | |
+ logging.debug( | |
+ "Progressive JPEG check took %0.3f seconds", self.progressive_time | |
+ ) | |
+ logging.debug("Waiting for gzip check to complete") | |
if self.gzip_thread is not None: | |
self.gzip_thread.join() | |
self.gzip_thread = None | |
if self.gzip_time is not None: | |
logging.debug("gzip check took %0.3f seconds", self.gzip_time) | |
- logging.debug('Waiting for image check to complete') | |
+ logging.debug("Waiting for image check to complete") | |
if self.image_thread is not None: | |
self.image_thread.join() | |
self.image_thread = None | |
if self.image_time is not None: | |
logging.debug("image check took %0.3f seconds", self.image_time) | |
- logging.debug('Waiting for CDN check to complete') | |
+ logging.debug("Waiting for CDN check to complete") | |
if self.cdn_thread is not None: | |
self.cdn_thread.join() | |
self.cdn_thread = None | |
if self.cdn_time is not None: | |
logging.debug("CDN check took %0.3f seconds", self.cdn_time) | |
- logging.debug('Waiting for Hosting check to complete') | |
+ logging.debug("Waiting for Hosting check to complete") | |
if self.hosting_thread is not None: | |
self.hosting_thread.join() | |
self.hosting_thread = None | |
if self.hosting_time is not None: | |
logging.debug("Hosting check took %0.3f seconds", self.hosting_time) | |
# Merge the results together | |
for request_id in self.cdn_results: | |
if request_id not in self.results: | |
self.results[request_id] = {} | |
- self.results[request_id]['cdn'] = self.cdn_results[request_id] | |
+ self.results[request_id]["cdn"] = self.cdn_results[request_id] | |
for request_id in self.gzip_results: | |
if request_id not in self.results: | |
self.results[request_id] = {} | |
- self.results[request_id]['gzip'] = self.gzip_results[request_id] | |
+ self.results[request_id]["gzip"] = self.gzip_results[request_id] | |
for request_id in self.image_results: | |
if request_id not in self.results: | |
self.results[request_id] = {} | |
- self.results[request_id]['image'] = self.image_results[request_id] | |
+ self.results[request_id]["image"] = self.image_results[request_id] | |
for request_id in self.progressive_results: | |
if request_id not in self.results: | |
self.results[request_id] = {} | |
- self.results[request_id]['progressive'] = self.progressive_results[request_id] | |
- if self.task is not None and 'page_data' in self.task: | |
+ self.results[request_id]["progressive"] = self.progressive_results[ | |
+ request_id | |
+ ] | |
+ if self.task is not None and "page_data" in self.task: | |
for name in self.hosting_results: | |
- self.task['page_data'][name] = self.hosting_results[name] | |
+ self.task["page_data"][name] = self.hosting_results[name] | |
# Save the results | |
if self.results: | |
- path = os.path.join(self.task['dir'], self.task['prefix']) + '_optimization.json.gz' | |
- gz_file = gzip.open(path, 'wb', 7) | |
+ path = ( | |
+ os.path.join(self.task["dir"], self.task["prefix"]) | |
+ + "_optimization.json.gz" | |
+ ) | |
+ gz_file = gzip.open(path, "wb", 7) | |
if gz_file: | |
gz_file.write(json.dumps(self.results)) | |
gz_file.close() | |
- logging.debug('Optimization checks complete') | |
+ logging.debug("Optimization checks complete") | |
return self.results | |
def check_keep_alive(self): | |
"""Check for requests where the connection is force-closed""" | |
from urlparse import urlsplit | |
+ | |
# build a list of origins and how many requests were issued to each | |
origins = {} | |
for request_id in self.requests: | |
request = self.requests[request_id] | |
- if 'url' in request: | |
- url = request['full_url'] if 'full_url' in request else request['url'] | |
+ if "url" in request: | |
+ url = request["full_url"] if "full_url" in request else request["url"] | |
parsed = urlsplit(url) | |
- origin = parsed.scheme + '://' + parsed.netloc | |
+ origin = parsed.scheme + "://" + parsed.netloc | |
if origin not in origins: | |
origins[origin] = 0 | |
origins[origin] += 1 | |
for request_id in self.requests: | |
try: | |
request = self.requests[request_id] | |
- if 'url' in request: | |
- check = {'score': 100} | |
- url = request['full_url'] if 'full_url' in request else request['url'] | |
+ if "url" in request: | |
+ check = {"score": 100} | |
+ url = ( | |
+ request["full_url"] if "full_url" in request else request["url"] | |
+ ) | |
parsed = urlsplit(url) | |
- origin = parsed.scheme + '://' + parsed.netloc | |
+ origin = parsed.scheme + "://" + parsed.netloc | |
if origins[origin] > 1: | |
- check['score'] = 100 | |
- keep_alive = self.get_header_value(request['response_headers'], | |
- 'Connection') | |
- if keep_alive is not None and keep_alive.lower().strip().find('close') > -1: | |
- check['score'] = 0 | |
+ check["score"] = 100 | |
+ keep_alive = self.get_header_value( | |
+ request["response_headers"], "Connection" | |
+ ) | |
+ if ( | |
+ keep_alive is not None | |
+ and keep_alive.lower().strip().find("close") > -1 | |
+ ): | |
+ check["score"] = 0 | |
if request_id not in self.results: | |
self.results[request_id] = {} | |
- self.results[request_id]['keep_alive'] = check | |
+ self.results[request_id]["keep_alive"] = check | |
except Exception: | |
pass | |
def get_time_remaining(self, request): | |
"""See if a request is static and how long it can be cached for""" | |
from email.utils import parsedate | |
- re_max_age = re.compile(r'max-age[ ]*=[ ]*(?P<maxage>[\d]+)') | |
+ | |
+ re_max_age = re.compile(r"max-age[ ]*=[ ]*(?P<maxage>[\d]+)") | |
is_static = False | |
time_remaining = -1 | |
try: | |
- if 'response_headers' in request: | |
- content_length = self.get_header_value(request['response_headers'], | |
- 'Content-Length') | |
+ if "response_headers" in request: | |
+ content_length = self.get_header_value( | |
+ request["response_headers"], "Content-Length" | |
+ ) | |
if content_length is not None: | |
- content_length = int(re.search(r'\d+', str(content_length)).group()) | |
+ content_length = int(re.search(r"\d+", str(content_length)).group()) | |
if content_length == 0: | |
return is_static, time_remaining | |
- if 'response_headers' in request: | |
- content_type = self.get_header_value(request['response_headers'], | |
- 'Content-Type') | |
- if content_type is None or \ | |
- (content_type.find('/html') == -1 and | |
- content_type.find('/cache-manifest') == -1): | |
+ if "response_headers" in request: | |
+ content_type = self.get_header_value( | |
+ request["response_headers"], "Content-Type" | |
+ ) | |
+ if content_type is None or ( | |
+ content_type.find("/html") == -1 | |
+ and content_type.find("/cache-manifest") == -1 | |
+ ): | |
is_static = True | |
- cache = self.get_header_value(request['response_headers'], 'Cache-Control') | |
- pragma = self.get_header_value(request['response_headers'], 'Pragma') | |
- expires = self.get_header_value(request['response_headers'], 'Expires') | |
+ cache = self.get_header_value( | |
+ request["response_headers"], "Cache-Control" | |
+ ) | |
+ pragma = self.get_header_value( | |
+ request["response_headers"], "Pragma" | |
+ ) | |
+ expires = self.get_header_value( | |
+ request["response_headers"], "Expires" | |
+ ) | |
if cache is not None: | |
cache = cache.lower() | |
- if cache.find('no-store') > -1 or cache.find('no-cache') > -1: | |
+ if ( | |
+ cache.find("no-store") > -1 | |
+ or cache.find("no-cache") > -1 | |
+ ): | |
is_static = False | |
if is_static and pragma is not None: | |
pragma = pragma.lower() | |
- if pragma.find('no-cache') > -1: | |
+ if pragma.find("no-cache") > -1: | |
is_static = False | |
if is_static: | |
time_remaining = 0 | |
if cache is not None: | |
matches = re.search(re_max_age, cache) | |
if matches: | |
- time_remaining = int(matches.groupdict().get('maxage')) | |
- age = self.get_header_value(request['response_headers'], 'Age') | |
+ time_remaining = int( | |
+ matches.groupdict().get("maxage") | |
+ ) | |
+ age = self.get_header_value( | |
+ request["response_headers"], "Age" | |
+ ) | |
if time_remaining == 0: | |
is_static = False | |
time_remaining = -1 | |
elif age is not None: | |
- time_remaining -= int(re.search(r'\d+', | |
- str(age).strip()).group()) | |
+ time_remaining -= int( | |
+ re.search(r"\d+", str(age).strip()).group() | |
+ ) | |
elif expires is not None: | |
- date = self.get_header_value(request['response_headers'], 'Date') | |
+ date = self.get_header_value( | |
+ request["response_headers"], "Date" | |
+ ) | |
exp = time.mktime(parsedate(expires)) | |
if date is not None: | |
now = time.mktime(parsedate(date)) | |
else: | |
now = time.time() | |
@@ -437,41 +441,45 @@ | |
def check_cache_static(self): | |
"""Check static resources for how long they are cacheable for""" | |
for request_id in self.requests: | |
try: | |
request = self.requests[request_id] | |
- check = {'score': -1, 'time': 0} | |
- if 'status' in request and request['status'] == 200: | |
+ check = {"score": -1, "time": 0} | |
+ if "status" in request and request["status"] == 200: | |
is_static, time_remaining = self.get_time_remaining(request) | |
if is_static: | |
- check['time'] = time_remaining | |
+ check["time"] = time_remaining | |
if time_remaining >= 604800: # 7 days | |
- check['score'] = 100 | |
+ check["score"] = 100 | |
elif time_remaining >= 3600: # 1 hour | |
- check['score'] = 50 | |
+ check["score"] = 50 | |
else: | |
- check['score'] = 0 | |
- if check['score'] >= 0: | |
+ check["score"] = 0 | |
+ if check["score"] >= 0: | |
if request_id not in self.results: | |
self.results[request_id] = {} | |
- self.results[request_id]['cache'] = check | |
+ self.results[request_id]["cache"] = check | |
except Exception: | |
pass | |
def check_hosting(self): | |
"""Pull the data needed to determine the hosting""" | |
start = monotonic.monotonic() | |
- self.hosting_results['base_page_ip_ptr'] = '' | |
- self.hosting_results['base_page_cname'] = '' | |
- self.hosting_results['base_page_dns_server'] = '' | |
+ self.hosting_results["base_page_ip_ptr"] = "" | |
+ self.hosting_results["base_page_cname"] = "" | |
+ self.hosting_results["base_page_dns_server"] = "" | |
domain = None | |
- if self.task is not None and 'page_data' in self.task and \ | |
- 'document_hostname' in self.task['page_data']: | |
- domain = self.task['page_data']['document_hostname'] | |
+ if ( | |
+ self.task is not None | |
+ and "page_data" in self.task | |
+ and "document_hostname" in self.task["page_data"] | |
+ ): | |
+ domain = self.task["page_data"]["document_hostname"] | |
if domain is not None: | |
try: | |
from dns import resolver, reversename | |
+ | |
dns_resolver = resolver.Resolver() | |
dns_resolver.timeout = 5 | |
# reverse-lookup the edge server | |
try: | |
addresses = dns_resolver.query(domain) | |
@@ -479,63 +487,66 @@ | |
addr = str(addresses[0]) | |
addr_name = reversename.from_address(addr) | |
if addr_name: | |
name = str(dns_resolver.query(addr_name, "PTR")[0]) | |
if name: | |
- self.hosting_results['base_page_ip_ptr'] = name.strip('. ') | |
+ self.hosting_results["base_page_ip_ptr"] = name.strip( | |
+ ". " | |
+ ) | |
except Exception: | |
pass | |
# get the CNAME for the address | |
try: | |
- answers = dns_resolver.query(domain, 'CNAME') | |
+ answers = dns_resolver.query(domain, "CNAME") | |
if answers and len(answers): | |
for rdata in answers: | |
- name = '.'.join(rdata.target).strip(' .') | |
+ name = ".".join(rdata.target).strip(" .") | |
if name != domain: | |
- self.hosting_results['base_page_cname'] = name | |
+ self.hosting_results["base_page_cname"] = name | |
break | |
except Exception: | |
pass | |
# get the name server for the domain | |
done = False | |
while domain is not None and not done: | |
try: | |
dns_servers = dns_resolver.query(domain, "NS") | |
- dns_name = str(dns_servers[0].target).strip('. ') | |
+ dns_name = str(dns_servers[0].target).strip(". ") | |
if dns_name: | |
- self.hosting_results['base_page_dns_server'] = dns_name | |
+ self.hosting_results["base_page_dns_server"] = dns_name | |
done = True | |
except Exception: | |
pass | |
- pos = domain.find('.') | |
+ pos = domain.find(".") | |
if pos > 0: | |
- domain = domain[pos + 1:] | |
+ domain = domain[pos + 1 :] | |
else: | |
domain = None | |
except Exception: | |
pass | |
self.hosting_time = monotonic.monotonic() - start | |
def check_cdn(self): | |
"""Check each request to see if it was served from a CDN""" | |
from urlparse import urlparse | |
+ | |
start = monotonic.monotonic() | |
# First pass, build a list of domains and see if the headers or domain matches | |
static_requests = {} | |
domains = {} | |
for request_id in self.requests: | |
request = self.requests[request_id] | |
is_static, _ = self.get_time_remaining(request) | |
if is_static: | |
static_requests[request_id] = True | |
- if 'url' in request: | |
- url = request['full_url'] if 'full_url' in request else request['url'] | |
+ if "url" in request: | |
+ url = request["full_url"] if "full_url" in request else request["url"] | |
domain = urlparse(url).hostname | |
if domain is not None: | |
if domain not in domains: | |
# Check the domain itself against the CDN list | |
- domains[domain] = '' | |
+ domains[domain] = "" | |
provider = self.check_cdn_name(domain) | |
if provider is not None: | |
domains[domain] = provider | |
# Spawn several workers to do CNAME lookups for the unknown domains | |
count = 0 | |
@@ -553,47 +564,48 @@ | |
for thread in threads: | |
thread.join() | |
try: | |
while True: | |
dns_result = self.dns_result_queue.get_nowait() | |
- domains[dns_result['domain']] = dns_result['provider'] | |
+ domains[dns_result["domain"]] = dns_result["provider"] | |
except Exception: | |
pass | |
# Final pass, populate the CDN info for each request | |
for request_id in self.requests: | |
- check = {'score': -1, 'provider': ''} | |
+ check = {"score": -1, "provider": ""} | |
request = self.requests[request_id] | |
if request_id in static_requests: | |
- check['score'] = 0 | |
- if 'url' in request: | |
- url = request['full_url'] if 'full_url' in request else request['url'] | |
+ check["score"] = 0 | |
+ if "url" in request: | |
+ url = request["full_url"] if "full_url" in request else request["url"] | |
domain = urlparse(url).hostname | |
if domain is not None: | |
if domain in domains and domains[domain]: | |
- check['score'] = 100 | |
- check['provider'] = domains[domain] | |
- if not check['provider'] and 'response_headers' in request: | |
- provider = self.check_cdn_headers(request['response_headers']) | |
+ check["score"] = 100 | |
+ check["provider"] = domains[domain] | |
+ if not check["provider"] and "response_headers" in request: | |
+ provider = self.check_cdn_headers(request["response_headers"]) | |
if provider is not None: | |
- check['score'] = 100 | |
- check['provider'] = provider | |
+ check["score"] = 100 | |
+ check["provider"] = provider | |
self.cdn_results[request_id] = check | |
self.cdn_time = monotonic.monotonic() - start | |
def find_dns_cdn(self, domain, depth=0): | |
"""Recursively check a CNAME chain""" | |
from dns import resolver, reversename | |
+ | |
dns_resolver = resolver.Resolver() | |
dns_resolver.timeout = 1 | |
provider = self.check_cdn_name(domain) | |
# First do a CNAME check | |
if provider is None: | |
try: | |
- answers = dns_resolver.query(domain, 'CNAME') | |
+ answers = dns_resolver.query(domain, "CNAME") | |
if answers and len(answers): | |
for rdata in answers: | |
- name = '.'.join(rdata.target).strip(' .') | |
+ name = ".".join(rdata.target).strip(" .") | |
if name != domain: | |
provider = self.check_cdn_name(name) | |
if provider is None and depth < 10: | |
provider = self.find_dns_cdn(name, depth + 1) | |
if provider is not None: | |
@@ -620,11 +632,11 @@ | |
try: | |
while True: | |
domain = self.dns_lookup_queue.get_nowait() | |
provider = self.find_dns_cdn(domain) | |
if provider is not None: | |
- self.dns_result_queue.put({'domain': domain, 'provider': provider}) | |
+ self.dns_result_queue.put({"domain": domain, "provider": provider}) | |
self.dns_lookup_queue.task_done() | |
except Exception: | |
pass | |
def check_cdn_name(self, domain): | |
@@ -654,78 +666,90 @@ | |
if len(check) and value.find(check) == -1: | |
all_match = False | |
break | |
if all_match: | |
matched_cdns.append(cdn) | |
- break; | |
+ break | |
if not len(matched_cdns): | |
return None | |
- return ', '.join(matched_cdns) | |
+ return ", ".join(matched_cdns) | |
def check_gzip(self): | |
"""Check each request to see if it can be compressed""" | |
start = monotonic.monotonic() | |
for request_id in self.requests: | |
try: | |
request = self.requests[request_id] | |
- content_length = self.get_header_value(request['response_headers'], | |
- 'Content-Length') | |
- if 'objectSize' in request: | |
- content_length = request['objectSize'] | |
+ content_length = self.get_header_value( | |
+ request["response_headers"], "Content-Length" | |
+ ) | |
+ if "objectSize" in request: | |
+ content_length = request["objectSize"] | |
elif content_length is not None: | |
- content_length = int(re.search(r'\d+', str(content_length)).group()) | |
- elif 'transfer_size' in request: | |
- content_length = request['transfer_size'] | |
+ content_length = int(re.search(r"\d+", str(content_length)).group()) | |
+ elif "transfer_size" in request: | |
+ content_length = request["transfer_size"] | |
if content_length is None: | |
content_length = 0 | |
- check = {'score': 0, 'size': content_length, 'target_size': content_length} | |
+ check = { | |
+ "score": 0, | |
+ "size": content_length, | |
+ "target_size": content_length, | |
+ } | |
encoding = None | |
- if 'response_headers' in request: | |
- encoding = self.get_header_value(request['response_headers'], | |
- 'Content-Encoding') | |
+ if "response_headers" in request: | |
+ encoding = self.get_header_value( | |
+ request["response_headers"], "Content-Encoding" | |
+ ) | |
# Check for responses that are already compressed (ignore the level) | |
if encoding is not None: | |
- if encoding.find('gzip') >= 0 or \ | |
- encoding.find('deflate') >= 0 or \ | |
- encoding.find('br') >= 0: | |
- check['score'] = 100 | |
+ if ( | |
+ encoding.find("gzip") >= 0 | |
+ or encoding.find("deflate") >= 0 | |
+ or encoding.find("br") >= 0 | |
+ ): | |
+ check["score"] = 100 | |
# Ignore small responses that will fit in a packet | |
- if not check['score'] and content_length < 1400: | |
- check['score'] = -1 | |
+ if not check["score"] and content_length < 1400: | |
+ check["score"] = -1 | |
# Try compressing it if it isn't an image | |
- if not check['score'] and 'body' in request: | |
- sniff_type = self.sniff_file_content(request['body']) | |
+ if not check["score"] and "body" in request: | |
+ sniff_type = self.sniff_file_content(request["body"]) | |
if sniff_type is not None: | |
- check['score'] = -1 | |
+ check["score"] = -1 | |
else: | |
- out_file = request['body'] + '.gzip' | |
- with open(request['body'], 'rb') as f_in: | |
- with gzip.open(out_file, 'wb', 7) as f_out: | |
+ out_file = request["body"] + ".gzip" | |
+ with open(request["body"], "rb") as f_in: | |
+ with gzip.open(out_file, "wb", 7) as f_out: | |
shutil.copyfileobj(f_in, f_out) | |
if os.path.isfile(out_file): | |
target_size = os.path.getsize(out_file) | |
try: | |
os.remove(out_file) | |
except Exception: | |
pass | |
if target_size is not None: | |
delta = content_length - target_size | |
# Only count it if there is at least 1 packet and 10% savings | |
- if target_size > 0 and \ | |
- delta > 1400 and \ | |
- target_size < (content_length * 0.9): | |
- check['target_size'] = target_size | |
- check['score'] = int(target_size * 100 / content_length) | |
+ if ( | |
+ target_size > 0 | |
+ and delta > 1400 | |
+ and target_size < (content_length * 0.9) | |
+ ): | |
+ check["target_size"] = target_size | |
+ check["score"] = int( | |
+ target_size * 100 / content_length | |
+ ) | |
else: | |
- check['score'] = -1 | |
+ check["score"] = -1 | |
else: | |
- check['score'] = -1 | |
+ check["score"] = -1 | |
else: | |
- check['score'] = -1 | |
- if check['score'] >= 0: | |
+ check["score"] = -1 | |
+ if check["score"] >= 0: | |
self.gzip_results[request_id] = check | |
except Exception: | |
pass | |
self.gzip_time = monotonic.monotonic() - start | |
@@ -733,67 +757,96 @@ | |
"""Check each request to see if images can be compressed better""" | |
start = monotonic.monotonic() | |
for request_id in self.requests: | |
try: | |
request = self.requests[request_id] | |
- content_length = self.get_header_value(request['response_headers'], | |
- 'Content-Length') | |
+ content_length = self.get_header_value( | |
+ request["response_headers"], "Content-Length" | |
+ ) | |
if content_length is not None: | |
- content_length = int(re.search(r'\d+', str(content_length)).group()) | |
- elif 'transfer_size' in request: | |
- content_length = request['transfer_size'] | |
- check = {'score': -1, 'size': content_length, 'target_size': content_length} | |
- if content_length and 'body' in request: | |
- sniff_type = self.sniff_file_content(request['body']) | |
- if sniff_type == 'jpeg': | |
+ content_length = int(re.search(r"\d+", str(content_length)).group()) | |
+ elif "transfer_size" in request: | |
+ content_length = request["transfer_size"] | |
+ check = { | |
+ "score": -1, | |
+ "size": content_length, | |
+ "target_size": content_length, | |
+ } | |
+ if content_length and "body" in request: | |
+ sniff_type = self.sniff_file_content(request["body"]) | |
+ if sniff_type == "jpeg": | |
if content_length < 1400: | |
- check['score'] = 100 | |
+ check["score"] = 100 | |
else: | |
# Compress it as a quality 85 stripped progressive image and compare | |
- jpeg_file = request['body'] + '.jpg' | |
- command = '{0} -define jpeg:dct-method=fast -strip '\ | |
- '-interlace Plane -quality 85 '\ | |
- '"{1}" "{2}"'.format(self.job['image_magick']['convert'], | |
- request['body'], jpeg_file) | |
+ jpeg_file = request["body"] + ".jpg" | |
+ command = ( | |
+ "{0} -define jpeg:dct-method=fast -strip " | |
+ "-interlace Plane -quality 85 " | |
+ '"{1}" "{2}"'.format( | |
+ self.job["image_magick"]["convert"], | |
+ request["body"], | |
+ jpeg_file, | |
+ ) | |
+ ) | |
subprocess.call(command, shell=True) | |
if os.path.isfile(jpeg_file): | |
target_size = os.path.getsize(jpeg_file) | |
try: | |
os.remove(jpeg_file) | |
except Exception: | |
pass | |
delta = content_length - target_size | |
# Only count it if there is at least 1 packet savings | |
if target_size > 0 and delta > 1400: | |
- check['target_size'] = target_size | |
- check['score'] = int(target_size * 100 / content_length) | |
+ check["target_size"] = target_size | |
+ check["score"] = int( | |
+ target_size * 100 / content_length | |
+ ) | |
else: | |
- check['score'] = 100 | |
- elif sniff_type == 'png': | |
- if 'response_body' not in request: | |
- request['response_body'] = '' | |
- with open(request['body'], 'rb') as f_in: | |
- request['response_body'] = f_in.read() | |
+ check["score"] = 100 | |
+ elif sniff_type == "png": | |
+ if "response_body" not in request: | |
+ request["response_body"] = "" | |
+ with open(request["body"], "rb") as f_in: | |
+ request["response_body"] = f_in.read() | |
if content_length < 1400: | |
- check['score'] = 100 | |
+ check["score"] = 100 | |
else: | |
# spell-checker: disable | |
- image_chunks = ["iCCP", "tIME", "gAMA", "PLTE", "acTL", "IHDR", "cHRM", | |
- "bKGD", "tRNS", "sBIT", "sRGB", "pHYs", "hIST", "vpAg", | |
- "oFFs", "fcTL", "fdAT", "IDAT"] | |
+ image_chunks = [ | |
+ "iCCP", | |
+ "tIME", | |
+ "gAMA", | |
+ "PLTE", | |
+ "acTL", | |
+ "IHDR", | |
+ "cHRM", | |
+ "bKGD", | |
+ "tRNS", | |
+ "sBIT", | |
+ "sRGB", | |
+ "pHYs", | |
+ "hIST", | |
+ "vpAg", | |
+ "oFFs", | |
+ "fcTL", | |
+ "fdAT", | |
+ "IDAT", | |
+ ] | |
# spell-checker: enable | |
- body = request['response_body'] | |
+ body = request["response_body"] | |
image_size = len(body) | |
valid = True | |
target_size = 8 | |
bytes_remaining = image_size - 8 | |
pos = 8 | |
while valid and bytes_remaining >= 4: | |
- chunk_len = struct.unpack('>I', body[pos: pos + 4])[0] | |
+ chunk_len = struct.unpack(">I", body[pos : pos + 4])[0] | |
pos += 4 | |
if chunk_len + 12 <= bytes_remaining: | |
- chunk_type = body[pos: pos + 4] | |
+ chunk_type = body[pos : pos + 4] | |
pos += 4 | |
if chunk_type in image_chunks: | |
target_size += chunk_len + 12 | |
pos += chunk_len + 4 # Skip the data and CRC | |
bytes_remaining -= chunk_len + 12 | |
@@ -802,109 +855,124 @@ | |
bytes_remaining = 0 | |
if valid: | |
delta = content_length - target_size | |
# Only count it if there is at least 1 packet savings | |
if target_size > 0 and delta > 1400: | |
- check['target_size'] = target_size | |
- check['score'] = int(target_size * 100 / content_length) | |
+ check["target_size"] = target_size | |
+ check["score"] = int( | |
+ target_size * 100 / content_length | |
+ ) | |
else: | |
- check['score'] = 100 | |
- elif sniff_type == 'gif': | |
+ check["score"] = 100 | |
+ elif sniff_type == "gif": | |
if content_length < 1400: | |
- check['score'] = 100 | |
+ check["score"] = 100 | |
else: | |
is_animated = False | |
from PIL import Image | |
- with Image.open(request['body']) as gif: | |
+ | |
+ with Image.open(request["body"]) as gif: | |
try: | |
gif.seek(1) | |
except EOFError: | |
is_animated = False | |
else: | |
is_animated = True | |
if is_animated: | |
- check['score'] = 100 | |
+ check["score"] = 100 | |
else: | |
# Convert it to a PNG | |
- png_file = request['body'] + '.png' | |
- command = 'convert "{0}" "{1}"'.format(request['body'], png_file) | |
+ png_file = request["body"] + ".png" | |
+ command = 'convert "{0}" "{1}"'.format( | |
+ request["body"], png_file | |
+ ) | |
subprocess.call(command, shell=True) | |
if os.path.isfile(png_file): | |
target_size = os.path.getsize(png_file) | |
try: | |
os.remove(png_file) | |
except Exception: | |
pass | |
delta = content_length - target_size | |
# Only count it if there is at least 1 packet savings | |
if target_size > 0 and delta > 1400: | |
- check['target_size'] = target_size | |
- check['score'] = int(target_size * 100 / content_length) | |
+ check["target_size"] = target_size | |
+ check["score"] = int( | |
+ target_size * 100 / content_length | |
+ ) | |
else: | |
- check['score'] = 100 | |
- elif sniff_type == 'webp': | |
- check['score'] = 100 | |
- if check['score'] >= 0: | |
+ check["score"] = 100 | |
+ elif sniff_type == "webp": | |
+ check["score"] = 100 | |
+ if check["score"] >= 0: | |
self.image_results[request_id] = check | |
except Exception: | |
pass | |
self.image_time = monotonic.monotonic() - start | |
def check_progressive(self): | |
"""Count the number of scan lines in each jpeg""" | |
from PIL import Image | |
+ | |
start = monotonic.monotonic() | |
for request_id in self.requests: | |
try: | |
request = self.requests[request_id] | |
- if 'body' in request: | |
- sniff_type = self.sniff_file_content(request['body']) | |
- if sniff_type == 'jpeg': | |
- check = {'size': os.path.getsize(request['body']), 'scan_count': 1} | |
- image = Image.open(request['body']) | |
+ if "body" in request: | |
+ sniff_type = self.sniff_file_content(request["body"]) | |
+ if sniff_type == "jpeg": | |
+ check = { | |
+ "size": os.path.getsize(request["body"]), | |
+ "scan_count": 1, | |
+ } | |
+ image = Image.open(request["body"]) | |
info = dict(image.info) | |
image.close() | |
- if 'progression' in info and info['progression']: | |
- check['scan_count'] = 0 | |
- if 'response_body' not in request: | |
- request['response_body'] = '' | |
- with open(request['body'], 'rb') as f_in: | |
- request['response_body'] = f_in.read() | |
- body = request['response_body'] | |
- content_length = len(request['response_body']) | |
+ if "progression" in info and info["progression"]: | |
+ check["scan_count"] = 0 | |
+ if "response_body" not in request: | |
+ request["response_body"] = "" | |
+ with open(request["body"], "rb") as f_in: | |
+ request["response_body"] = f_in.read() | |
+ body = request["response_body"] | |
+ content_length = len(request["response_body"]) | |
pos = 0 | |
try: | |
while pos < content_length: | |
- block = struct.unpack('B', body[pos])[0] | |
+ block = struct.unpack("B", body[pos])[0] | |
pos += 1 | |
- if block != 0xff: | |
+ if block != 0xFF: | |
break | |
- block = struct.unpack('B', body[pos])[0] | |
+ block = struct.unpack("B", body[pos])[0] | |
pos += 1 | |
- while block == 0xff: | |
- block = struct.unpack('B', body[pos])[0] | |
+ while block == 0xFF: | |
+ block = struct.unpack("B", body[pos])[0] | |
pos += 1 | |
- if block == 0x01 or (block >= 0xd0 and block <= 0xd9): | |
+ if block == 0x01 or ( | |
+ block >= 0xD0 and block <= 0xD9 | |
+ ): | |
continue | |
- elif block == 0xda: # Image data | |
- check['scan_count'] += 1 | |
+ elif block == 0xDA: # Image data | |
+ check["scan_count"] += 1 | |
# Seek to the next non-padded 0xff to find the next marker | |
found = False | |
while not found and pos < content_length: | |
- value = struct.unpack('B', body[pos])[0] | |
+ value = struct.unpack("B", body[pos])[0] | |
pos += 1 | |
- if value == 0xff: | |
- value = struct.unpack('B', body[pos])[0] | |
+ if value == 0xFF: | |
+ value = struct.unpack("B", body[pos])[0] | |
pos += 1 | |
if value != 0x00: | |
found = True | |
pos -= 2 | |
else: | |
- chunk = body[pos: pos + 2] | |
- block_size = struct.unpack('2B', chunk) | |
+ chunk = body[pos : pos + 2] | |
+ block_size = struct.unpack("2B", chunk) | |
pos += 2 | |
- block_size = block_size[0] * 256 + block_size[1] - 2 | |
+ block_size = ( | |
+ block_size[0] * 256 + block_size[1] - 2 | |
+ ) | |
pos += block_size | |
except Exception: | |
pass | |
self.progressive_results[request_id] = check | |
except Exception: | |
@@ -919,36 +987,36 @@ | |
value = headers[name] | |
else: | |
find = name.lower() | |
for header_name in headers: | |
check = header_name.lower() | |
- if check == find or (check[0] == ':' and check[1:] == find): | |
+ if check == find or (check[0] == ":" and check[1:] == find): | |
value = headers[header_name] | |
break | |
return value | |
def sniff_content(self, raw_bytes): | |
"""Check the beginning of the file to see if it is a known image type""" | |
content_type = None | |
hex_bytes = binascii.hexlify(raw_bytes[:14]).lower() | |
# spell-checker: disable | |
- if hex_bytes[0:6] == 'ffd8ff': | |
- content_type = 'jpeg' | |
- elif hex_bytes[0:16] == '89504e470d0a1a0a': | |
- content_type = 'png' | |
- elif raw_bytes[:6] == 'GIF87a' or raw_bytes[:6] == 'GIF89a': | |
- content_type = 'gif' | |
- elif raw_bytes[:4] == 'RIFF' and raw_bytes[8:14] == 'WEBPVP': | |
- content_type = 'webp' | |
- elif raw_bytes[:4] == 'wOF2': | |
- content_type = 'WOFF2' | |
+ if hex_bytes[0:6] == "ffd8ff": | |
+ content_type = "jpeg" | |
+ elif hex_bytes[0:16] == "89504e470d0a1a0a": | |
+ content_type = "png" | |
+ elif raw_bytes[:6] == "GIF87a" or raw_bytes[:6] == "GIF89a": | |
+ content_type = "gif" | |
+ elif raw_bytes[:4] == "RIFF" and raw_bytes[8:14] == "WEBPVP": | |
+ content_type = "webp" | |
+ elif raw_bytes[:4] == "wOF2": | |
+ content_type = "WOFF2" | |
# spell-checker: enable | |
return content_type | |
def sniff_file_content(self, image_file): | |
"""Sniff the content type from a file""" | |
content_type = None | |
- with open(image_file, 'rb') as f_in: | |
+ with open(image_file, "rb") as f_in: | |
raw = f_in.read(14) | |
content_type = self.sniff_content(raw) | |
return content_type | |
--- internal/firefox.py 2018-12-01 11:00:57.818178 +0000 | |
+++ internal/firefox.py 2019-02-06 17:08:28.372920 +0000 | |
@@ -18,10 +18,11 @@ | |
from .desktop_browser import DesktopBrowser | |
class Firefox(DesktopBrowser): | |
"""Firefox""" | |
+ | |
def __init__(self, path, options, job): | |
DesktopBrowser.__init__(self, path, options, job) | |
self.job = job | |
self.task = None | |
self.options = options | |
@@ -39,185 +40,215 @@ | |
self.start_offset = None | |
self.browser_version = None | |
self.main_request_headers = None | |
self.log_pos = {} | |
self.log_level = 5 | |
- if 'browser_info' in job and 'log_level' in job['browser_info']: | |
- self.log_level = job['browser_info']['log_level'] | |
+ if "browser_info" in job and "log_level" in job["browser_info"]: | |
+ self.log_level = job["browser_info"]["log_level"] | |
self.page = {} | |
self.requests = {} | |
self.last_activity = monotonic.monotonic() | |
- self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'js') | |
- self.start_page = 'http://127.0.0.1:8888/orange.html' | |
+ self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "js") | |
+ self.start_page = "http://127.0.0.1:8888/orange.html" | |
def prepare(self, job, task): | |
"""Prepare the profile/OS for the browser""" | |
- self.moz_log = os.path.join(task['dir'], 'moz.log') | |
+ self.moz_log = os.path.join(task["dir"], "moz.log") | |
self.log_pos = {} | |
self.page = {} | |
self.requests = {} | |
self.main_request_headers = None | |
os.environ["MOZ_LOG_FILE"] = self.moz_log | |
- moz_log_env = 'timestamp,nsHttp:{0:d},nsSocketTransport:{0:d}'\ | |
- 'nsHostResolver:{0:d},pipnss:5'.format(self.log_level) | |
+ moz_log_env = ( | |
+ "timestamp,nsHttp:{0:d},nsSocketTransport:{0:d}" | |
+ "nsHostResolver:{0:d},pipnss:5".format(self.log_level) | |
+ ) | |
os.environ["MOZ_LOG"] = moz_log_env | |
- logging.debug('MOZ_LOG = %s', moz_log_env) | |
+ logging.debug("MOZ_LOG = %s", moz_log_env) | |
DesktopBrowser.prepare(self, job, task) | |
- profile_template = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'Firefox', 'profile') | |
- if not task['cached'] and os.path.isdir(profile_template): | |
- try: | |
- if os.path.isdir(task['profile']): | |
- shutil.rmtree(task['profile']) | |
- shutil.copytree(profile_template, task['profile']) | |
+ profile_template = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "Firefox", "profile" | |
+ ) | |
+ if not task["cached"] and os.path.isdir(profile_template): | |
+ try: | |
+ if os.path.isdir(task["profile"]): | |
+ shutil.rmtree(task["profile"]) | |
+ shutil.copytree(profile_template, task["profile"]) | |
except Exception: | |
pass | |
# Delete any unsent crash reports | |
crash_dir = None | |
- if platform.system() == 'Windows': | |
- if 'APPDATA' in os.environ: | |
- crash_dir = os.path.join(os.environ['APPDATA'], | |
- 'Mozilla', 'Firefox', 'Crash Reports') | |
+ if platform.system() == "Windows": | |
+ if "APPDATA" in os.environ: | |
+ crash_dir = os.path.join( | |
+ os.environ["APPDATA"], "Mozilla", "Firefox", "Crash Reports" | |
+ ) | |
else: | |
- crash_dir = os.path.join(os.path.expanduser('~'), | |
- '.mozilla', 'firefox', 'Crash Reports') | |
+ crash_dir = os.path.join( | |
+ os.path.expanduser("~"), ".mozilla", "firefox", "Crash Reports" | |
+ ) | |
if crash_dir and os.path.isdir(crash_dir): | |
logging.debug("Clearing crash reports in %s", crash_dir) | |
try: | |
shutil.rmtree(crash_dir) | |
except Exception: | |
pass | |
# Prepare the config for the extension to query | |
- if self.job['message_server'] is not None: | |
+ if self.job["message_server"] is not None: | |
config = None | |
- names = ['block', | |
- 'block_domains', | |
- 'block_domains_except', | |
- 'headers', | |
- 'cookies', | |
- 'overrideHosts'] | |
+ names = [ | |
+ "block", | |
+ "block_domains", | |
+ "block_domains_except", | |
+ "headers", | |
+ "cookies", | |
+ "overrideHosts", | |
+ ] | |
for name in names: | |
if name in task and task[name]: | |
if config is None: | |
config = {} | |
config[name] = task[name] | |
- self.job['message_server'].config = config | |
+ self.job["message_server"].config = config | |
def disable_fsync(self, command_line): | |
"""Use eatmydata if it is installed to disable fsync""" | |
- if platform.system() == 'Linux': | |
- try: | |
- cmd = ['eatmydata', 'date'] | |
- logging.debug(' '.join(cmd)) | |
+ if platform.system() == "Linux": | |
+ try: | |
+ cmd = ["eatmydata", "date"] | |
+ logging.debug(" ".join(cmd)) | |
subprocess.check_call(cmd) | |
- command_line = 'eatmydata ' + command_line | |
+ command_line = "eatmydata " + command_line | |
except Exception as err: | |
pass | |
return command_line | |
def launch(self, job, task): | |
"""Launch the browser""" | |
- if self.job['message_server'] is not None: | |
- self.job['message_server'].flush_messages() | |
+ if self.job["message_server"] is not None: | |
+ self.job["message_server"].flush_messages() | |
self.connected = False | |
from marionette_driver.marionette import Marionette | |
from marionette_driver.addons import Addons | |
- args = ['-profile', '"{0}"'.format(task['profile']), | |
- '-no-remote', | |
- '-marionette', | |
- 'about:blank'] | |
- if self.path.find(' ') > -1: | |
+ | |
+ args = [ | |
+ "-profile", | |
+ '"{0}"'.format(task["profile"]), | |
+ "-no-remote", | |
+ "-marionette", | |
+ "about:blank", | |
+ ] | |
+ if self.path.find(" ") > -1: | |
command_line = '"{0}"'.format(self.path) | |
else: | |
command_line = self.path | |
- command_line += ' ' + ' '.join(args) | |
+ command_line += " " + " ".join(args) | |
command_line = self.disable_fsync(command_line) | |
DesktopBrowser.launch_browser(self, command_line) | |
try: | |
- self.marionette = Marionette('localhost', port=2828) | |
+ self.marionette = Marionette("localhost", port=2828) | |
capabilities = None | |
- if 'ignoreSSL' in job and job['ignoreSSL']: | |
- capabilities = {'acceptInsecureCerts': True} | |
- self.marionette.start_session(timeout=self.task['time_limit'], capabilities=capabilities) | |
+ if "ignoreSSL" in job and job["ignoreSSL"]: | |
+ capabilities = {"acceptInsecureCerts": True} | |
+ self.marionette.start_session( | |
+ timeout=self.task["time_limit"], capabilities=capabilities | |
+ ) | |
self.configure_prefs() | |
- logging.debug('Installing extension') | |
+ logging.debug("Installing extension") | |
self.addons = Addons(self.marionette) | |
- extension_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'Firefox', 'extension') | |
+ extension_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "Firefox", | |
+ "extension", | |
+ ) | |
self.extension_id = self.addons.install(extension_path, temp=True) | |
- logging.debug('Resizing browser to %dx%d', task['width'], task['height']) | |
- self.marionette.set_window_rect(x=0, y=0, height=task['height'], width=task['width']) | |
- if 'browserVersion' in self.marionette.session_capabilities: | |
- self.browser_version = self.marionette.session_capabilities['browserVersion'] | |
+ logging.debug("Resizing browser to %dx%d", task["width"], task["height"]) | |
+ self.marionette.set_window_rect( | |
+ x=0, y=0, height=task["height"], width=task["width"] | |
+ ) | |
+ if "browserVersion" in self.marionette.session_capabilities: | |
+ self.browser_version = self.marionette.session_capabilities[ | |
+ "browserVersion" | |
+ ] | |
self.marionette.navigate(self.start_page) | |
time.sleep(0.5) | |
self.wait_for_extension() | |
if self.connected: | |
# Override the UA String if necessary | |
- ua_string = self.execute_js('navigator.userAgent;') | |
+ ua_string = self.execute_js("navigator.userAgent;") | |
modified = False | |
- if 'uastring' in self.job: | |
- ua_string = self.job['uastring'] | |
+ if "uastring" in self.job: | |
+ ua_string = self.job["uastring"] | |
modified = True | |
- if ua_string is not None and 'AppendUA' in task: | |
- ua_string += ' ' + task['AppendUA'] | |
+ if ua_string is not None and "AppendUA" in task: | |
+ ua_string += " " + task["AppendUA"] | |
modified = True | |
if modified: | |
logging.debug(ua_string) | |
- self.marionette.set_pref('general.useragent.override', ua_string) | |
+ self.marionette.set_pref("general.useragent.override", ua_string) | |
# Location | |
- if 'lat' in self.job and 'lng' in self.job: | |
+ if "lat" in self.job and "lng" in self.job: | |
try: | |
- lat = float(str(self.job['lat'])) | |
- lng = float(str(self.job['lng'])) | |
- location_uri = 'data:application/json,{{'\ | |
- '"status":"OK","accuracy":10.0,'\ | |
- '"location":{{"lat":{0:f},"lng":{1:f}}}'\ | |
- '}}'.format(lat, lng) | |
- logging.debug('Setting location: %s', location_uri) | |
- self.set_pref('geo.wifi.uri', location_uri) | |
+ lat = float(str(self.job["lat"])) | |
+ lng = float(str(self.job["lng"])) | |
+ location_uri = ( | |
+ "data:application/json,{{" | |
+ '"status":"OK","accuracy":10.0,' | |
+ '"location":{{"lat":{0:f},"lng":{1:f}}}' | |
+ "}}".format(lat, lng) | |
+ ) | |
+ logging.debug("Setting location: %s", location_uri) | |
+ self.set_pref("geo.wifi.uri", location_uri) | |
except Exception: | |
pass | |
# Figure out the native viewport size | |
size = self.execute_js("[window.innerWidth, window.innerHeight]") | |
logging.debug(size) | |
if size is not None and len(size) == 2: | |
- task['actual_viewport'] = {"width": size[0], "height": size[1]} | |
- if 'adjust_viewport' in job and job['adjust_viewport']: | |
- delta_x = max(task['width'] - size[0], 0) | |
- delta_y = max(task['height'] - size[1], 0) | |
+ task["actual_viewport"] = {"width": size[0], "height": size[1]} | |
+ if "adjust_viewport" in job and job["adjust_viewport"]: | |
+ delta_x = max(task["width"] - size[0], 0) | |
+ delta_y = max(task["height"] - size[1], 0) | |
if delta_x or delta_y: | |
- width = task['width'] + delta_x | |
- height = task['height'] + delta_y | |
- logging.debug('Resizing browser to %dx%d', width, height) | |
- self.marionette.set_window_rect(x=0, y=0, height=height, width=width) | |
+ width = task["width"] + delta_x | |
+ height = task["height"] + delta_y | |
+ logging.debug("Resizing browser to %dx%d", width, height) | |
+ self.marionette.set_window_rect( | |
+ x=0, y=0, height=height, width=width | |
+ ) | |
# Wait for the browser startup to finish | |
DesktopBrowser.wait_for_idle(self) | |
except Exception as err: | |
logging.exception("Error starting Firefox") | |
- task['error'] = 'Error starting Firefox: {0}'.format(err.__str__()) | |
+ task["error"] = "Error starting Firefox: {0}".format(err.__str__()) | |
def get_pref_value(self, value): | |
"""Convert a JSON pref value to Python""" | |
str_match = re.match(r'^"(.*)"$', value) | |
- if value == 'true': | |
+ if value == "true": | |
value = True | |
- elif value == 'false': | |
+ elif value == "false": | |
value = False | |
- elif re.match(r'^[\d]+$', value): | |
+ elif re.match(r"^[\d]+$", value): | |
value = int(value) | |
elif str_match: | |
value = str_match.group(1) | |
else: | |
value = None | |
return value | |
def configure_prefs(self): | |
"""Load the prefs file and configure them through Marionette""" | |
prefs = {} | |
- prefs_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'Firefox', 'profile', 'prefs.js') | |
+ prefs_file = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "Firefox", | |
+ "profile", | |
+ "prefs.js", | |
+ ) | |
with open(prefs_file) as f_in: | |
for line in f_in: | |
matches = re.search(r'user_pref\("([^"]+)",[\s]*([^\)]*)[\s]*\);', line) | |
if matches: | |
key = matches.group(1).strip() | |
@@ -246,22 +277,22 @@ | |
pass | |
self.marionette = None | |
DesktopBrowser.close_browser(self, job, task) | |
# make SURE the Firefox processes are gone | |
if platform.system() == "Linux": | |
- subprocess.call(['killall', '-9', 'firefox']) | |
- subprocess.call(['killall', '-9', 'firefox-trunk']) | |
- os.environ["MOZ_LOG_FILE"] = '' | |
- os.environ["MOZ_LOG"] = '' | |
+ subprocess.call(["killall", "-9", "firefox"]) | |
+ subprocess.call(["killall", "-9", "firefox-trunk"]) | |
+ os.environ["MOZ_LOG_FILE"] = "" | |
+ os.environ["MOZ_LOG"] = "" | |
def stop(self, job, task): | |
"""Kill the browser""" | |
self.close_browser(job, task) | |
DesktopBrowser.stop(self, job, task) | |
# delete the raw log files | |
if self.moz_log is not None: | |
- files = sorted(glob.glob(self.moz_log + '*')) | |
+ files = sorted(glob.glob(self.moz_log + "*")) | |
for path in files: | |
try: | |
os.remove(path) | |
except Exception: | |
pass | |
@@ -273,386 +304,447 @@ | |
def run_task(self, task): | |
"""Run an individual test""" | |
if self.marionette is not None and self.connected: | |
self.task = task | |
logging.debug("Running test") | |
- end_time = monotonic.monotonic() + task['test_time_limit'] | |
- task['current_step'] = 1 | |
+ end_time = monotonic.monotonic() + task["test_time_limit"] | |
+ task["current_step"] = 1 | |
recording = False | |
- while len(task['script']) and task['error'] is None and \ | |
- monotonic.monotonic() < end_time: | |
+ while ( | |
+ len(task["script"]) | |
+ and task["error"] is None | |
+ and monotonic.monotonic() < end_time | |
+ ): | |
self.prepare_task(task) | |
- command = task['script'].pop(0) | |
- if not recording and command['record']: | |
+ command = task["script"].pop(0) | |
+ if not recording and command["record"]: | |
recording = True | |
self.on_start_recording(task) | |
try: | |
self.process_command(command) | |
except Exception: | |
logging.exception("Exception running task") | |
- if command['record']: | |
+ if command["record"]: | |
self.wait_for_page_load() | |
- if not task['combine_steps'] or not len(task['script']): | |
+ if not task["combine_steps"] or not len(task["script"]): | |
self.on_stop_capture(task) | |
self.on_stop_recording(task) | |
recording = False | |
self.on_start_processing(task) | |
self.wait_for_processing(task) | |
self.step_complete(task) | |
- if task['log_data']: | |
+ if task["log_data"]: | |
# Move on to the next step | |
- task['current_step'] += 1 | |
+ task["current_step"] += 1 | |
self.event_name = None | |
- task['navigated'] = True | |
+ task["navigated"] = True | |
# Always navigate to about:blank after finishing in case the tab is | |
# remembered across sessions | |
try: | |
- self.marionette.navigate('about:blank') | |
- except Exception: | |
- logging.debug('Marionette exception navigating to about:blank after the test') | |
+ self.marionette.navigate("about:blank") | |
+ except Exception: | |
+ logging.debug( | |
+ "Marionette exception navigating to about:blank after the test" | |
+ ) | |
self.task = None | |
def wait_for_extension(self): | |
"""Wait for the extension to send the started message""" | |
- if self.job['message_server'] is not None: | |
+ if self.job["message_server"] is not None: | |
end_time = monotonic.monotonic() + 30 | |
while monotonic.monotonic() < end_time: | |
try: | |
- self.job['message_server'].get_message(1) | |
- logging.debug('Extension started') | |
+ self.job["message_server"].get_message(1) | |
+ logging.debug("Extension started") | |
self.connected = True | |
break | |
except Exception: | |
pass | |
def wait_for_page_load(self): | |
"""Wait for the onload event from the extension""" | |
- if self.job['message_server'] is not None and self.connected: | |
+ if self.job["message_server"] is not None and self.connected: | |
start_time = monotonic.monotonic() | |
- end_time = start_time + self.task['time_limit'] | |
+ end_time = start_time + self.task["time_limit"] | |
done = False | |
interval = 1 | |
while not done: | |
if self.page_loaded is not None: | |
interval = 0.1 | |
try: | |
- self.process_message(self.job['message_server'].get_message(interval)) | |
+ self.process_message( | |
+ self.job["message_server"].get_message(interval) | |
+ ) | |
except Exception: | |
pass | |
now = monotonic.monotonic() | |
elapsed_test = now - start_time | |
# Allow up to 5 seconds after a navigation for a re-navigation to happen | |
# (bizarre sequence Firefox seems to do) | |
if self.possible_navigation_error is not None: | |
- elapsed_error = now - self.possible_navigation_error['time'] | |
+ elapsed_error = now - self.possible_navigation_error["time"] | |
if elapsed_error > 5: | |
- self.nav_error = self.possible_navigation_error['error'] | |
+ self.nav_error = self.possible_navigation_error["error"] | |
if self.nav_error is not None: | |
- logging.debug('Navigation error') | |
+ logging.debug("Navigation error") | |
done = True | |
if self.page_loaded is None: | |
- logging.debug('Page not loaded') | |
- self.task['error'] = self.nav_error | |
- self.task['page_data']['result'] = 12999 | |
+ logging.debug("Page not loaded") | |
+ self.task["error"] = self.nav_error | |
+ self.task["page_data"]["result"] = 12999 | |
else: | |
- logging.debug('Page loaded') | |
+ logging.debug("Page loaded") | |
elif now >= end_time: | |
done = True | |
# only consider it an error if we didn't get a page load event | |
if self.page_loaded is None: | |
- self.task['error'] = "Page Load Timeout" | |
- self.task['page_data']['result'] = 99998 | |
- elif 'time' not in self.job or elapsed_test > self.job['time']: | |
+ self.task["error"] = "Page Load Timeout" | |
+ self.task["page_data"]["result"] = 99998 | |
+ elif "time" not in self.job or elapsed_test > self.job["time"]: | |
elapsed_activity = now - self.last_activity | |
- elapsed_page_load = now - self.page_loaded if self.page_loaded else 0 | |
- if elapsed_page_load >= 1 and elapsed_activity >= self.task['activity_time']: | |
+ elapsed_page_load = ( | |
+ now - self.page_loaded if self.page_loaded else 0 | |
+ ) | |
+ if ( | |
+ elapsed_page_load >= 1 | |
+ and elapsed_activity >= self.task["activity_time"] | |
+ ): | |
done = True | |
- elif self.task['error'] is not None: | |
+ elif self.task["error"] is not None: | |
done = True | |
def execute_js(self, script): | |
"""Run JavaScript""" | |
ret = None | |
if self.marionette is not None: | |
try: | |
- ret = self.marionette.execute_script('return ' + script, script_timeout=30) | |
+ ret = self.marionette.execute_script( | |
+ "return " + script, script_timeout=30 | |
+ ) | |
except Exception: | |
pass | |
return ret | |
def run_js_file(self, file_name): | |
"""Execute one of our JS scripts""" | |
ret = None | |
script = None | |
script_file_path = os.path.join(self.script_dir, file_name) | |
if os.path.isfile(script_file_path): | |
- with open(script_file_path, 'rb') as script_file: | |
+ with open(script_file_path, "rb") as script_file: | |
script = script_file.read() | |
if script is not None: | |
try: | |
- ret = self.marionette.execute_script('return ' + script, script_timeout=30) | |
+ ret = self.marionette.execute_script( | |
+ "return " + script, script_timeout=30 | |
+ ) | |
except Exception: | |
pass | |
if ret is not None: | |
logging.debug(ret) | |
return ret | |
def collect_browser_metrics(self, task): | |
"""Collect all of the in-page browser metrics that we need""" | |
logging.debug("Collecting user timing metrics") | |
- user_timing = self.run_js_file('user_timing.js') | |
+ user_timing = self.run_js_file("user_timing.js") | |
if user_timing is not None: | |
- path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join(task["dir"], task["prefix"] + "_timed_events.json.gz") | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(user_timing)) | |
logging.debug("Collecting page-level metrics") | |
- page_data = self.run_js_file('page_data.js') | |
+ page_data = self.run_js_file("page_data.js") | |
if page_data is not None: | |
- task['page_data'].update(page_data) | |
- if 'customMetrics' in self.job: | |
+ task["page_data"].update(page_data) | |
+ if "customMetrics" in self.job: | |
custom_metrics = {} | |
- for name in self.job['customMetrics']: | |
+ for name in self.job["customMetrics"]: | |
logging.debug("Collecting custom metric %s", name) | |
- script = 'var wptCustomMetric = function() {' +\ | |
- self.job['customMetrics'][name] +\ | |
- '};try{return wptCustomMetric();}catch(e){};' | |
+ script = ( | |
+ "var wptCustomMetric = function() {" | |
+ + self.job["customMetrics"][name] | |
+ + "};try{return wptCustomMetric();}catch(e){};" | |
+ ) | |
try: | |
- custom_metrics[name] = self.marionette.execute_script(script, script_timeout=30) | |
+ custom_metrics[name] = self.marionette.execute_script( | |
+ script, script_timeout=30 | |
+ ) | |
if custom_metrics[name] is not None: | |
logging.debug(custom_metrics[name]) | |
except Exception: | |
pass | |
- path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join(task["dir"], task["prefix"] + "_metrics.json.gz") | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(custom_metrics)) | |
def process_message(self, message): | |
"""Process a message from the extension""" | |
logging.debug(message) | |
if self.recording: | |
self.last_activity = monotonic.monotonic() | |
try: | |
# Make all of the timestamps relative to the test start to match the log events | |
- if 'timeStamp' in message['body']: | |
- timestamp = message['body']['timeStamp'] | |
+ if "timeStamp" in message["body"]: | |
+ timestamp = message["body"]["timeStamp"] | |
seconds = int(timestamp / 1000) | |
milliseconds = timestamp - (seconds * 1000) | |
event_time = datetime.utcfromtimestamp(seconds) | |
event_time += timedelta(milliseconds=milliseconds) | |
- elapsed = event_time - self.task['start_time'] | |
- message['body']['timeStamp'] = elapsed.total_seconds() | |
- cat, msg = message['path'].split('.', 1) | |
- if 'timeStamp' not in message['body'] or message['body']['timeStamp'] > 0: | |
- if cat == 'webNavigation': | |
- self.process_web_navigation(msg, message['body']) | |
- elif cat == 'webRequest': | |
- self.process_web_request(msg, message['body']) | |
+ elapsed = event_time - self.task["start_time"] | |
+ message["body"]["timeStamp"] = elapsed.total_seconds() | |
+ cat, msg = message["path"].split(".", 1) | |
+ if ( | |
+ "timeStamp" not in message["body"] | |
+ or message["body"]["timeStamp"] > 0 | |
+ ): | |
+ if cat == "webNavigation": | |
+ self.process_web_navigation(msg, message["body"]) | |
+ elif cat == "webRequest": | |
+ self.process_web_request(msg, message["body"]) | |
except Exception: | |
pass | |
def process_web_navigation(self, message, evt): | |
"""Handle webNavigation.*""" | |
if evt is not None: | |
- if message == 'onBeforeNavigate': | |
- if 'frameId' in evt and evt['frameId'] == 0: | |
+ if message == "onBeforeNavigate": | |
+ if "frameId" in evt and evt["frameId"] == 0: | |
self.page_loaded = None | |
self.possible_navigation_error = None | |
logging.debug("Starting navigation") | |
- if 'timeStamp' in evt and 'start' not in self.page: | |
- self.page['start'] = evt['timeStamp'] | |
- elif message == 'onCommitted': | |
- if 'timeStamp' in evt and 'frameId' in evt and evt['frameId'] == 0 \ | |
- and 'committed' not in self.page: | |
- self.page['committed'] = evt['timeStamp'] | |
- if 'injectScript' in self.job and self.marionette is not None: | |
- logging.debug("Injecting script: \n%s", self.job['injectScript']) | |
+ if "timeStamp" in evt and "start" not in self.page: | |
+ self.page["start"] = evt["timeStamp"] | |
+ elif message == "onCommitted": | |
+ if ( | |
+ "timeStamp" in evt | |
+ and "frameId" in evt | |
+ and evt["frameId"] == 0 | |
+ and "committed" not in self.page | |
+ ): | |
+ self.page["committed"] = evt["timeStamp"] | |
+ if "injectScript" in self.job and self.marionette is not None: | |
+ logging.debug("Injecting script: \n%s", self.job["injectScript"]) | |
try: | |
- self.marionette.execute_script(self.job['injectScript'], | |
- script_timeout=30) | |
+ self.marionette.execute_script( | |
+ self.job["injectScript"], script_timeout=30 | |
+ ) | |
except Exception: | |
pass | |
- elif message == 'onDOMContentLoaded': | |
- if 'timeStamp' in evt and 'frameId' in evt and evt['frameId'] == 0: | |
- self.page['DOMContentLoaded'] = evt['timeStamp'] | |
- elif message == 'onCompleted': | |
- if 'frameId' in evt and evt['frameId'] == 0: | |
+ elif message == "onDOMContentLoaded": | |
+ if "timeStamp" in evt and "frameId" in evt and evt["frameId"] == 0: | |
+ self.page["DOMContentLoaded"] = evt["timeStamp"] | |
+ elif message == "onCompleted": | |
+ if "frameId" in evt and evt["frameId"] == 0: | |
self.page_loaded = monotonic.monotonic() | |
logging.debug("Page loaded") | |
- if 'timeStamp' in evt: | |
- self.page['loaded'] = evt['timeStamp'] | |
- elif message == 'onErrorOccurred': | |
- if 'frameId' in evt and evt['frameId'] == 0: | |
+ if "timeStamp" in evt: | |
+ self.page["loaded"] = evt["timeStamp"] | |
+ elif message == "onErrorOccurred": | |
+ if "frameId" in evt and evt["frameId"] == 0: | |
logging.debug("Possible navigation error") | |
- err_msg = evt['error'] if 'error' in evt else 'Navigation failed' | |
+ err_msg = evt["error"] if "error" in evt else "Navigation failed" | |
self.possible_navigation_error = { | |
- 'time': monotonic.monotonic(), | |
- 'error': err_msg | |
+ "time": monotonic.monotonic(), | |
+ "error": err_msg, | |
} | |
def process_web_request(self, message, evt): | |
"""Handle webNavigation.*""" | |
- if evt is not None and 'requestId' in evt and 'timeStamp' in evt: | |
- if evt['requestId'] not in self.requests: | |
- self.requests[evt['requestId']] = {'id': evt['requestId'], | |
- 'from_net': True} | |
- request = self.requests[evt['requestId']] | |
- if 'url' in evt and evt['url'] is not None and 'url' not in request: | |
- request['url'] = evt['url'] | |
- if 'method' in evt and evt['method'] is not None and 'method' not in request: | |
- request['method'] = evt['method'] | |
- if 'type' in evt and evt['type'] is not None and 'type' not in request: | |
- request['type'] = evt['type'] | |
- if 'ip' in evt and evt['ip'] is not None and 'ip' not in request: | |
- request['ip'] = evt['ip'] | |
- if 'fromCache' in evt and evt['fromCache']: | |
- request['from_net'] = False | |
- if 'statusLine' in evt and evt['statusLine'] is not None: | |
- request['status_line'] = evt['statusLine'] | |
- if 'statusCode' in evt and evt['statusCode'] is not None: | |
- request['status'] = evt['statusCode'] | |
- if 'requestHeaders' in evt and evt['requestHeaders'] is not None and \ | |
- 'request_headers' not in request: | |
- request['request_headers'] = list(evt['requestHeaders']) | |
- if 'responseHeaders' in evt and evt['responseHeaders'] is not None and \ | |
- 'response_headers' not in request: | |
- request['response_headers'] = list(evt['responseHeaders']) | |
+ if evt is not None and "requestId" in evt and "timeStamp" in evt: | |
+ if evt["requestId"] not in self.requests: | |
+ self.requests[evt["requestId"]] = { | |
+ "id": evt["requestId"], | |
+ "from_net": True, | |
+ } | |
+ request = self.requests[evt["requestId"]] | |
+ if "url" in evt and evt["url"] is not None and "url" not in request: | |
+ request["url"] = evt["url"] | |
+ if ( | |
+ "method" in evt | |
+ and evt["method"] is not None | |
+ and "method" not in request | |
+ ): | |
+ request["method"] = evt["method"] | |
+ if "type" in evt and evt["type"] is not None and "type" not in request: | |
+ request["type"] = evt["type"] | |
+ if "ip" in evt and evt["ip"] is not None and "ip" not in request: | |
+ request["ip"] = evt["ip"] | |
+ if "fromCache" in evt and evt["fromCache"]: | |
+ request["from_net"] = False | |
+ if "statusLine" in evt and evt["statusLine"] is not None: | |
+ request["status_line"] = evt["statusLine"] | |
+ if "statusCode" in evt and evt["statusCode"] is not None: | |
+ request["status"] = evt["statusCode"] | |
+ if ( | |
+ "requestHeaders" in evt | |
+ and evt["requestHeaders"] is not None | |
+ and "request_headers" not in request | |
+ ): | |
+ request["request_headers"] = list(evt["requestHeaders"]) | |
+ if ( | |
+ "responseHeaders" in evt | |
+ and evt["responseHeaders"] is not None | |
+ and "response_headers" not in request | |
+ ): | |
+ request["response_headers"] = list(evt["responseHeaders"]) | |
if self.main_request_headers is None: | |
- self.main_request_headers = list(evt['responseHeaders']) | |
- | |
- if message == 'onBeforeRequest': | |
- request['created'] = evt['timeStamp'] | |
- elif message == 'onSendHeaders': | |
- request['start'] = evt['timeStamp'] | |
- elif message == 'onBeforeRedirect': | |
- if 'first_byte' not in request: | |
- request['first_byte'] = evt['timeStamp'] | |
- if 'end' not in request or evt['timeStamp'] > request['end']: | |
- request['end'] = evt['timeStamp'] | |
- elif message == 'onHeadersReceived': | |
- if 'first_byte' not in request: | |
- request['first_byte'] = evt['timeStamp'] | |
- if 'end' not in request or evt['timeStamp'] > request['end']: | |
- request['end'] = evt['timeStamp'] | |
- elif message == 'onResponseStarted': | |
- if 'first_byte' not in request: | |
- request['first_byte'] = evt['timeStamp'] | |
- if 'end' not in request or evt['timeStamp'] > request['end']: | |
- request['end'] = evt['timeStamp'] | |
- elif message == 'onCompleted': | |
- if 'first_byte' not in request: | |
- request['first_byte'] = evt['timeStamp'] | |
- if 'end' not in request or evt['timeStamp'] > request['end']: | |
- request['end'] = evt['timeStamp'] | |
- elif message == 'onErrorOccurred': | |
- if 'end' not in request or evt['timeStamp'] > request['end']: | |
- request['end'] = evt['timeStamp'] | |
- if 'error' in evt: | |
- request['error'] = evt['error'] | |
- if 'status' not in request: | |
- request['status'] = 12999 | |
+ self.main_request_headers = list(evt["responseHeaders"]) | |
+ | |
+ if message == "onBeforeRequest": | |
+ request["created"] = evt["timeStamp"] | |
+ elif message == "onSendHeaders": | |
+ request["start"] = evt["timeStamp"] | |
+ elif message == "onBeforeRedirect": | |
+ if "first_byte" not in request: | |
+ request["first_byte"] = evt["timeStamp"] | |
+ if "end" not in request or evt["timeStamp"] > request["end"]: | |
+ request["end"] = evt["timeStamp"] | |
+ elif message == "onHeadersReceived": | |
+ if "first_byte" not in request: | |
+ request["first_byte"] = evt["timeStamp"] | |
+ if "end" not in request or evt["timeStamp"] > request["end"]: | |
+ request["end"] = evt["timeStamp"] | |
+ elif message == "onResponseStarted": | |
+ if "first_byte" not in request: | |
+ request["first_byte"] = evt["timeStamp"] | |
+ if "end" not in request or evt["timeStamp"] > request["end"]: | |
+ request["end"] = evt["timeStamp"] | |
+ elif message == "onCompleted": | |
+ if "first_byte" not in request: | |
+ request["first_byte"] = evt["timeStamp"] | |
+ if "end" not in request or evt["timeStamp"] > request["end"]: | |
+ request["end"] = evt["timeStamp"] | |
+ elif message == "onErrorOccurred": | |
+ if "end" not in request or evt["timeStamp"] > request["end"]: | |
+ request["end"] = evt["timeStamp"] | |
+ if "error" in evt: | |
+ request["error"] = evt["error"] | |
+ if "status" not in request: | |
+ request["status"] = 12999 | |
def prepare_task(self, task): | |
"""Format the file prefixes for multi-step testing""" | |
- if task['current_step'] == 1: | |
- task['prefix'] = task['task_prefix'] | |
- task['video_subdirectory'] = task['task_video_prefix'] | |
+ if task["current_step"] == 1: | |
+ task["prefix"] = task["task_prefix"] | |
+ task["video_subdirectory"] = task["task_video_prefix"] | |
else: | |
- task['prefix'] = '{0}_{1:d}'.format(task['task_prefix'], task['current_step']) | |
- task['video_subdirectory'] = '{0}_{1:d}'.format(task['task_video_prefix'], | |
- task['current_step']) | |
- if task['video_subdirectory'] not in task['video_directories']: | |
- task['video_directories'].append(task['video_subdirectory']) | |
+ task["prefix"] = "{0}_{1:d}".format( | |
+ task["task_prefix"], task["current_step"] | |
+ ) | |
+ task["video_subdirectory"] = "{0}_{1:d}".format( | |
+ task["task_video_prefix"], task["current_step"] | |
+ ) | |
+ if task["video_subdirectory"] not in task["video_directories"]: | |
+ task["video_directories"].append(task["video_subdirectory"]) | |
if self.event_name is not None: | |
- task['step_name'] = self.event_name | |
+ task["step_name"] = self.event_name | |
else: | |
- task['step_name'] = 'Step_{0:d}'.format(task['current_step']) | |
+ task["step_name"] = "Step_{0:d}".format(task["current_step"]) | |
def on_start_recording(self, task): | |
"""Notification that we are about to start an operation that needs to be recorded""" | |
# Clear the state | |
self.page = {} | |
self.requests = {} | |
- task['page_data'] = {'date': time.time()} | |
- task['page_result'] = None | |
- task['run_start_time'] = monotonic.monotonic() | |
- if self.browser_version is not None and 'browserVersion' not in task['page_data']: | |
- task['page_data']['browserVersion'] = self.browser_version | |
- task['page_data']['browser_version'] = self.browser_version | |
+ task["page_data"] = {"date": time.time()} | |
+ task["page_result"] = None | |
+ task["run_start_time"] = monotonic.monotonic() | |
+ if ( | |
+ self.browser_version is not None | |
+ and "browserVersion" not in task["page_data"] | |
+ ): | |
+ task["page_data"]["browserVersion"] = self.browser_version | |
+ task["page_data"]["browser_version"] = self.browser_version | |
# Mark the start point in the various log files | |
self.log_pos = {} | |
if self.moz_log is not None: | |
- files = sorted(glob.glob(self.moz_log + '*')) | |
+ files = sorted(glob.glob(self.moz_log + "*")) | |
for path in files: | |
self.log_pos[path] = os.path.getsize(path) | |
self.recording = True | |
now = monotonic.monotonic() | |
- if not self.task['stop_at_onload']: | |
+ if not self.task["stop_at_onload"]: | |
self.last_activity = now | |
if self.page_loaded is not None: | |
self.page_loaded = now | |
DesktopBrowser.on_start_recording(self, task) | |
- logging.debug('Starting measurement') | |
- task['start_time'] = datetime.utcnow() | |
+ logging.debug("Starting measurement") | |
+ task["start_time"] = datetime.utcnow() | |
def on_stop_capture(self, task): | |
"""Do any quick work to stop things that are capturing data""" | |
DesktopBrowser.on_stop_capture(self, task) | |
- if 'heroElementTimes' in self.job and self.job['heroElementTimes']: | |
+ if "heroElementTimes" in self.job and self.job["heroElementTimes"]: | |
hero_elements = None | |
custom_hero_selectors = {} | |
- if 'heroElements' in self.job: | |
- custom_hero_selectors = self.job['heroElements'] | |
- logging.debug('Collecting hero element positions') | |
- with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: | |
+ if "heroElements" in self.job: | |
+ custom_hero_selectors = self.job["heroElements"] | |
+ logging.debug("Collecting hero element positions") | |
+ with open( | |
+ os.path.join(self.script_dir, "hero_elements.js"), "rb" | |
+ ) as script_file: | |
hero_elements_script = script_file.read() | |
- script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' | |
+ script = ( | |
+ hero_elements_script + "(" + json.dumps(custom_hero_selectors) + ")" | |
+ ) | |
hero_elements = self.execute_js(script) | |
if hero_elements is not None: | |
- path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join( | |
+ task["dir"], task["prefix"] + "_hero_elements.json.gz" | |
+ ) | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(hero_elements)) | |
def on_stop_recording(self, task): | |
"""Notification that we are done with recording""" | |
self.recording = False | |
DesktopBrowser.on_stop_recording(self, task) | |
if self.connected: | |
- if self.job['pngScreenShot']: | |
- screen_shot = os.path.join(task['dir'], task['prefix'] + '_screen.png') | |
+ if self.job["pngScreenShot"]: | |
+ screen_shot = os.path.join(task["dir"], task["prefix"] + "_screen.png") | |
self.grab_screenshot(screen_shot, png=True) | |
else: | |
- screen_shot = os.path.join(task['dir'], task['prefix'] + '_screen.jpg') | |
+ screen_shot = os.path.join(task["dir"], task["prefix"] + "_screen.jpg") | |
self.grab_screenshot(screen_shot, png=False, resize=600) | |
# Collect end of test data from the browser | |
self.collect_browser_metrics(task) | |
# Collect the interactive periods | |
- interactive = self.execute_js('window.wrappedJSObject.wptagentGetInteractivePeriods();') | |
+ interactive = self.execute_js( | |
+ "window.wrappedJSObject.wptagentGetInteractivePeriods();" | |
+ ) | |
if interactive is not None and len(interactive): | |
- interactive_file = os.path.join(task['dir'], task['prefix'] + '_interactive.json.gz') | |
- with gzip.open(interactive_file, 'wb', 7) as f_out: | |
+ interactive_file = os.path.join( | |
+ task["dir"], task["prefix"] + "_interactive.json.gz" | |
+ ) | |
+ with gzip.open(interactive_file, "wb", 7) as f_out: | |
f_out.write(interactive) | |
# Close the browser if we are done testing (helps flush logs) | |
- if not len(task['script']): | |
+ if not len(task["script"]): | |
self.close_browser(self.job, task) | |
# Copy the log files | |
if self.moz_log is not None: | |
- task['moz_log'] = os.path.join(task['dir'], task['prefix'] + '_moz.log') | |
- files = sorted(glob.glob(self.moz_log + '*')) | |
+ task["moz_log"] = os.path.join(task["dir"], task["prefix"] + "_moz.log") | |
+ files = sorted(glob.glob(self.moz_log + "*")) | |
for path in files: | |
try: | |
base_name = os.path.basename(path) | |
- dest = os.path.join(task['dir'], | |
- task['prefix'] + '_' + base_name + '.gz') | |
+ dest = os.path.join( | |
+ task["dir"], task["prefix"] + "_" + base_name + ".gz" | |
+ ) | |
start_pos = self.log_pos[path] if path in self.log_pos else 0 | |
end_pos = os.path.getsize(path) | |
if end_pos > start_pos: | |
length = end_pos - start_pos | |
- logging.debug('Preparing moz log %s (%d bytes from %d)', | |
- base_name, length, start_pos) | |
- with open(path, 'rb') as f_in: | |
+ logging.debug( | |
+ "Preparing moz log %s (%d bytes from %d)", | |
+ base_name, | |
+ length, | |
+ start_pos, | |
+ ) | |
+ with open(path, "rb") as f_in: | |
f_in.seek(start_pos) | |
- with gzip.open(dest, 'wb', 7) as f_out: | |
+ with gzip.open(dest, "wb", 7) as f_out: | |
while length > 0: | |
read_bytes = min(length, 1024 * 1024) | |
buff = f_in.read(read_bytes) | |
read_bytes = len(buff) | |
f_out.write(buff) | |
@@ -663,31 +755,41 @@ | |
def on_start_processing(self, task): | |
"""Start any processing of the captured data""" | |
DesktopBrowser.on_start_processing(self, task) | |
# Parse the moz log for the accurate request timings | |
request_timings = [] | |
- if 'moz_log' in task: | |
+ if "moz_log" in task: | |
from internal.support.firefox_log_parser import FirefoxLogParser | |
+ | |
parser = FirefoxLogParser() | |
- start_time = task['start_time'].strftime('%Y-%m-%d %H:%M:%S.%f') | |
- logging.debug('Parsing moz logs relative to %s start time', start_time) | |
- request_timings = parser.process_logs(task['moz_log'], start_time) | |
- files = sorted(glob.glob(task['moz_log'] + '*')) | |
+ start_time = task["start_time"].strftime("%Y-%m-%d %H:%M:%S.%f") | |
+ logging.debug("Parsing moz logs relative to %s start time", start_time) | |
+ request_timings = parser.process_logs(task["moz_log"], start_time) | |
+ files = sorted(glob.glob(task["moz_log"] + "*")) | |
for path in files: | |
try: | |
os.remove(path) | |
except Exception: | |
pass | |
# Build the request and page data | |
- if len(request_timings) and task['current_step'] >= 1: | |
+ if len(request_timings) and task["current_step"] >= 1: | |
self.adjust_timings(request_timings) | |
self.process_requests(request_timings, task) | |
def adjust_timings(self, requests): | |
"""Adjust the request timings to start at zero for the earliest timestamp""" | |
- timestamps = ['dns_start', 'dns_end', 'connect_start', 'connect_end', | |
- 'ssl_start', 'ssl_end', 'start', 'first_byte', 'end'] | |
+ timestamps = [ | |
+ "dns_start", | |
+ "dns_end", | |
+ "connect_start", | |
+ "connect_end", | |
+ "ssl_start", | |
+ "ssl_end", | |
+ "start", | |
+ "first_byte", | |
+ "end", | |
+ ] | |
earliest = None | |
for request in requests: | |
for entry in timestamps: | |
if entry in request and request[entry] >= 0: | |
if earliest is None or request[entry] < earliest: | |
@@ -697,79 +799,86 @@ | |
self.start_offset = earliest | |
for request in requests: | |
for entry in timestamps: | |
if entry in request and request[entry] >= earliest: | |
request[entry] -= earliest | |
- if 'chunks' in request: | |
- for chunk in request['chunks']: | |
- if 'ts' in chunk and chunk['ts'] >= earliest: | |
- chunk['ts'] -= earliest | |
+ if "chunks" in request: | |
+ for chunk in request["chunks"]: | |
+ if "ts" in chunk and chunk["ts"] >= earliest: | |
+ chunk["ts"] -= earliest | |
def wait_for_processing(self, task): | |
"""Wait for any background processing threads to finish""" | |
DesktopBrowser.wait_for_processing(self, task) | |
def process_command(self, command): | |
"""Process an individual script command""" | |
logging.debug("Processing script command:") | |
logging.debug(command) | |
- if command['command'] == 'navigate': | |
- self.task['page_data']['URL'] = command['target'] | |
- url = str(command['target']).replace('"', '\"') | |
+ if command["command"] == "navigate": | |
+ self.task["page_data"]["URL"] = command["target"] | |
+ url = str(command["target"]).replace('"', '"') | |
script = 'window.location="{0}";'.format(url) | |
script = self.prepare_script_for_record(script) | |
self.marionette.execute_script(script) | |
- elif command['command'] == 'logdata': | |
- self.task['combine_steps'] = False | |
- if int(re.search(r'\d+', str(command['target'])).group()): | |
+ elif command["command"] == "logdata": | |
+ self.task["combine_steps"] = False | |
+ if int(re.search(r"\d+", str(command["target"])).group()): | |
logging.debug("Data logging enabled") | |
- self.task['log_data'] = True | |
+ self.task["log_data"] = True | |
else: | |
logging.debug("Data logging disabled") | |
- self.task['log_data'] = False | |
- elif command['command'] == 'combinesteps': | |
- self.task['log_data'] = True | |
- self.task['combine_steps'] = True | |
- elif command['command'] == 'seteventname': | |
- self.event_name = command['target'] | |
- elif command['command'] == 'exec': | |
- script = command['target'] | |
- if command['record']: | |
+ self.task["log_data"] = False | |
+ elif command["command"] == "combinesteps": | |
+ self.task["log_data"] = True | |
+ self.task["combine_steps"] = True | |
+ elif command["command"] == "seteventname": | |
+ self.event_name = command["target"] | |
+ elif command["command"] == "exec": | |
+ script = command["target"] | |
+ if command["record"]: | |
script = self.prepare_script_for_record(script) | |
self.marionette.execute_script(script) | |
- elif command['command'] == 'sleep': | |
- delay = min(60, max(0, int(re.search(r'\d+', str(command['target'])).group()))) | |
+ elif command["command"] == "sleep": | |
+ delay = min( | |
+ 60, max(0, int(re.search(r"\d+", str(command["target"])).group())) | |
+ ) | |
if delay > 0: | |
time.sleep(delay) | |
- elif command['command'] == 'setabm': | |
- self.task['stop_at_onload'] = \ | |
- bool('target' in command and int(re.search(r'\d+', | |
- str(command['target'])).group()) == 0) | |
- elif command['command'] == 'setactivitytimeout': | |
- if 'target' in command: | |
- milliseconds = int(re.search(r'\d+', str(command['target'])).group()) | |
- self.task['activity_time'] = max(0, min(30, float(milliseconds) / 1000.0)) | |
- elif command['command'] == 'setuseragent': | |
- self.task['user_agent_string'] = command['target'] | |
- elif command['command'] == 'firefoxpref': | |
- if 'target' in command and 'value' in command: | |
- self.set_pref(command['target'], command['value']) | |
- elif command['command'] == 'setlocation': | |
- try: | |
- if 'target' in command and command['target'].find(',') > 0: | |
+ elif command["command"] == "setabm": | |
+ self.task["stop_at_onload"] = bool( | |
+ "target" in command | |
+ and int(re.search(r"\d+", str(command["target"])).group()) == 0 | |
+ ) | |
+ elif command["command"] == "setactivitytimeout": | |
+ if "target" in command: | |
+ milliseconds = int(re.search(r"\d+", str(command["target"])).group()) | |
+ self.task["activity_time"] = max( | |
+ 0, min(30, float(milliseconds) / 1000.0) | |
+ ) | |
+ elif command["command"] == "setuseragent": | |
+ self.task["user_agent_string"] = command["target"] | |
+ elif command["command"] == "firefoxpref": | |
+ if "target" in command and "value" in command: | |
+ self.set_pref(command["target"], command["value"]) | |
+ elif command["command"] == "setlocation": | |
+ try: | |
+ if "target" in command and command["target"].find(",") > 0: | |
accuracy = 0 | |
- if 'value' in command and re.match(r'\d+', command['value']): | |
- accuracy = int(re.search(r'\d+', str(command['value'])).group()) | |
- parts = command['target'].split(',') | |
+ if "value" in command and re.match(r"\d+", command["value"]): | |
+ accuracy = int(re.search(r"\d+", str(command["value"])).group()) | |
+ parts = command["target"].split(",") | |
lat = float(parts[0]) | |
lng = float(parts[1]) | |
- location_uri = 'data:application/json,{{'\ | |
- '"status":"OK","accuracy":{2:d},'\ | |
- '"location":{{"lat":{0:f},"lng":{1:f}}}'\ | |
- '}}'.format(lat, lng, accuracy) | |
- logging.debug('Setting location: %s', location_uri) | |
- self.set_pref('geo.wifi.uri', location_uri) | |
+ location_uri = ( | |
+ "data:application/json,{{" | |
+ '"status":"OK","accuracy":{2:d},' | |
+ '"location":{{"lat":{0:f},"lng":{1:f}}}' | |
+ "}}".format(lat, lng, accuracy) | |
+ ) | |
+ logging.debug("Setting location: %s", location_uri) | |
+ self.set_pref("geo.wifi.uri", location_uri) | |
except Exception: | |
pass | |
def navigate(self, url): | |
"""Navigate to the given URL""" | |
@@ -791,328 +900,376 @@ | |
def grab_screenshot(self, path, png=True, resize=0): | |
"""Save the screen shot (png or jpeg)""" | |
if self.marionette is not None: | |
try: | |
- data = self.marionette.screenshot(format='binary', full=False) | |
+ data = self.marionette.screenshot(format="binary", full=False) | |
if data is not None: | |
- resize_string = '' if not resize else '-resize {0:d}x{0:d} '.format(resize) | |
+ resize_string = ( | |
+ "" if not resize else "-resize {0:d}x{0:d} ".format(resize) | |
+ ) | |
if png: | |
- with open(path, 'wb') as image_file: | |
+ with open(path, "wb") as image_file: | |
image_file.write(data) | |
if len(resize_string): | |
- cmd = '{0} -format png -define png:color-type=2 '\ | |
- '-depth 8 {1}"{2}"'.format(self.job['image_magick']['mogrify'], | |
- resize_string, path) | |
+ cmd = ( | |
+ "{0} -format png -define png:color-type=2 " | |
+ '-depth 8 {1}"{2}"'.format( | |
+ self.job["image_magick"]["mogrify"], | |
+ resize_string, | |
+ path, | |
+ ) | |
+ ) | |
logging.debug(cmd) | |
subprocess.call(cmd, shell=True) | |
else: | |
- tmp_file = path + '.png' | |
- with open(tmp_file, 'wb') as image_file: | |
+ tmp_file = path + ".png" | |
+ with open(tmp_file, "wb") as image_file: | |
image_file.write(data) | |
command = '{0} "{1}" {2}-quality {3:d} "{4}"'.format( | |
- self.job['image_magick']['convert'], | |
- tmp_file, resize_string, self.job['imageQuality'], path) | |
+ self.job["image_magick"]["convert"], | |
+ tmp_file, | |
+ resize_string, | |
+ self.job["imageQuality"], | |
+ path, | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
if os.path.isfile(tmp_file): | |
try: | |
os.remove(tmp_file) | |
except Exception: | |
pass | |
except Exception as err: | |
- logging.debug('Exception grabbing screen shot: %s', str(err)) | |
+ logging.debug("Exception grabbing screen shot: %s", str(err)) | |
def process_requests(self, request_timings, task): | |
"""Convert all of the request and page events into the format needed for WPT""" | |
result = {} | |
- result['requests'] = self.merge_requests(request_timings) | |
- result['pageData'] = self.calculate_page_stats(result['requests']) | |
- devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') | |
- with gzip.open(devtools_file, 'wb', 7) as f_out: | |
+ result["requests"] = self.merge_requests(request_timings) | |
+ result["pageData"] = self.calculate_page_stats(result["requests"]) | |
+ devtools_file = os.path.join( | |
+ task["dir"], task["prefix"] + "_devtools_requests.json.gz" | |
+ ) | |
+ with gzip.open(devtools_file, "wb", 7) as f_out: | |
json.dump(result, f_out) | |
def get_empty_request(self, request_id, url): | |
"""Return and empty, initialized request""" | |
parts = urlparse.urlsplit(url) | |
- request = {'type': 3, | |
- 'id': request_id, | |
- 'request_id': request_id, | |
- 'ip_addr': '', | |
- 'full_url': url, | |
- 'is_secure': 1 if parts.scheme == 'https' else 0, | |
- 'method': '', | |
- 'host': parts.netloc, | |
- 'url': parts.path, | |
- 'responseCode': -1, | |
- 'load_start': -1, | |
- 'load_ms': -1, | |
- 'ttfb_ms': -1, | |
- 'dns_start': -1, | |
- 'dns_end': -1, | |
- 'dns_ms': -1, | |
- 'connect_start': -1, | |
- 'connect_end': -1, | |
- 'connect_ms': -1, | |
- 'ssl_start': -1, | |
- 'ssl_end': -1, | |
- 'ssl_ms': -1, | |
- 'bytesIn': 0, | |
- 'bytesOut': 0, | |
- 'objectSize': 0, | |
- 'initiator': '', | |
- 'initiator_line': '', | |
- 'initiator_column': '', | |
- 'server_rtt': None, | |
- 'headers': {'request': [], 'response': []}, | |
- 'score_cache': -1, | |
- 'score_cdn': -1, | |
- 'score_gzip': -1, | |
- 'score_cookies': -1, | |
- 'score_keep-alive': -1, | |
- 'score_minify': -1, | |
- 'score_combine': -1, | |
- 'score_compress': -1, | |
- 'score_etags': -1, | |
- 'gzip_total': None, | |
- 'gzip_save': None, | |
- 'minify_total': None, | |
- 'minify_save': None, | |
- 'image_total': None, | |
- 'image_save': None, | |
- 'cache_time': None, | |
- 'cdn_provider': None, | |
- 'server_count': None, | |
- 'socket': -1 | |
- } | |
+ request = { | |
+ "type": 3, | |
+ "id": request_id, | |
+ "request_id": request_id, | |
+ "ip_addr": "", | |
+ "full_url": url, | |
+ "is_secure": 1 if parts.scheme == "https" else 0, | |
+ "method": "", | |
+ "host": parts.netloc, | |
+ "url": parts.path, | |
+ "responseCode": -1, | |
+ "load_start": -1, | |
+ "load_ms": -1, | |
+ "ttfb_ms": -1, | |
+ "dns_start": -1, | |
+ "dns_end": -1, | |
+ "dns_ms": -1, | |
+ "connect_start": -1, | |
+ "connect_end": -1, | |
+ "connect_ms": -1, | |
+ "ssl_start": -1, | |
+ "ssl_end": -1, | |
+ "ssl_ms": -1, | |
+ "bytesIn": 0, | |
+ "bytesOut": 0, | |
+ "objectSize": 0, | |
+ "initiator": "", | |
+ "initiator_line": "", | |
+ "initiator_column": "", | |
+ "server_rtt": None, | |
+ "headers": {"request": [], "response": []}, | |
+ "score_cache": -1, | |
+ "score_cdn": -1, | |
+ "score_gzip": -1, | |
+ "score_cookies": -1, | |
+ "score_keep-alive": -1, | |
+ "score_minify": -1, | |
+ "score_combine": -1, | |
+ "score_compress": -1, | |
+ "score_etags": -1, | |
+ "gzip_total": None, | |
+ "gzip_save": None, | |
+ "minify_total": None, | |
+ "minify_save": None, | |
+ "image_total": None, | |
+ "image_save": None, | |
+ "cache_time": None, | |
+ "cdn_provider": None, | |
+ "server_count": None, | |
+ "socket": -1, | |
+ } | |
if len(parts.query): | |
- request['url'] += '?' + parts.query | |
+ request["url"] += "?" + parts.query | |
return request | |
def get_header_value(self, headers, name): | |
"""Return the value for the given header""" | |
- value = '' | |
+ value = "" | |
name = name.lower() | |
for header in headers: | |
- pos = header.find(':') | |
+ pos = header.find(":") | |
if pos > 0: | |
key = header[0:pos].lower() | |
if key.startswith(name): | |
- val = header[pos + 1:].strip() | |
+ val = header[pos + 1 :].strip() | |
if len(value): | |
- value += '; ' | |
+ value += "; " | |
value += val | |
return value | |
def merge_requests(self, request_timings): | |
"""Merge the requests from the extension and log files""" | |
requests = [] | |
# Start with the requests reported from the extension | |
for req_id in self.requests: | |
try: | |
req = self.requests[req_id] | |
- if req['from_net'] and 'start' in req and 'url' in req: | |
- request = self.get_empty_request(req['id'], req['url']) | |
- if 'ip' in req: | |
- request['ip_addr'] = req['ip'] | |
- if 'method' in req: | |
- request['method'] = req['method'] | |
- if 'status' in req: | |
- request['responseCode'] = req['status'] | |
- if 'type' in req: | |
- request['requestType'] = req['type'] | |
- if 'request_headers' in req: | |
- for header in req['request_headers']: | |
- if 'name' in header and 'value' in header: | |
- header_text = '{0}: {1}'.format(header['name'], header['value']) | |
- request['bytesOut'] += len(header_text) + 2 | |
- request['headers']['request'].append(header_text) | |
- if 'status_line' in req: | |
- request['bytesIn'] += len(req['status_line']) + 2 | |
- request['headers']['response'].append(req['status_line']) | |
- if 'response_headers' in req: | |
- for header in req['response_headers']: | |
- if 'name' in header and 'value' in header: | |
+ if req["from_net"] and "start" in req and "url" in req: | |
+ request = self.get_empty_request(req["id"], req["url"]) | |
+ if "ip" in req: | |
+ request["ip_addr"] = req["ip"] | |
+ if "method" in req: | |
+ request["method"] = req["method"] | |
+ if "status" in req: | |
+ request["responseCode"] = req["status"] | |
+ if "type" in req: | |
+ request["requestType"] = req["type"] | |
+ if "request_headers" in req: | |
+ for header in req["request_headers"]: | |
+ if "name" in header and "value" in header: | |
+ header_text = "{0}: {1}".format( | |
+ header["name"], header["value"] | |
+ ) | |
+ request["bytesOut"] += len(header_text) + 2 | |
+ request["headers"]["request"].append(header_text) | |
+ if "status_line" in req: | |
+ request["bytesIn"] += len(req["status_line"]) + 2 | |
+ request["headers"]["response"].append(req["status_line"]) | |
+ if "response_headers" in req: | |
+ for header in req["response_headers"]: | |
+ if "name" in header and "value" in header: | |
try: | |
- header_text = '{0}: {1}'.format(header['name'], header['value']) | |
- request['bytesIn'] += len(header_text) + 2 | |
- request['headers']['response'].append(header_text) | |
+ header_text = "{0}: {1}".format( | |
+ header["name"], header["value"] | |
+ ) | |
+ request["bytesIn"] += len(header_text) + 2 | |
+ request["headers"]["response"].append(header_text) | |
except Exception: | |
pass | |
- if 'created' in req: | |
- request['created'] = req['created'] | |
- request['load_start'] = int(round(req['start'] * 1000.0)) | |
- if 'first_byte' in req: | |
- ttfb = int(round((req['first_byte'] - req['start']) * 1000.0)) | |
- request['ttfb_ms'] = max(0, ttfb) | |
- if 'end' in req: | |
- load_time = int(round((req['end'] - req['start']) * 1000.0)) | |
- request['load_ms'] = max(0, load_time) | |
- size = self.get_header_value(request['headers']['response'], 'Content-Length') | |
+ if "created" in req: | |
+ request["created"] = req["created"] | |
+ request["load_start"] = int(round(req["start"] * 1000.0)) | |
+ if "first_byte" in req: | |
+ ttfb = int(round((req["first_byte"] - req["start"]) * 1000.0)) | |
+ request["ttfb_ms"] = max(0, ttfb) | |
+ if "end" in req: | |
+ load_time = int(round((req["end"] - req["start"]) * 1000.0)) | |
+ request["load_ms"] = max(0, load_time) | |
+ size = self.get_header_value( | |
+ request["headers"]["response"], "Content-Length" | |
+ ) | |
if len(size): | |
- request['bytesIn'] += int(re.search(r'\d+', str(size)).group()) | |
+ request["bytesIn"] += int(re.search(r"\d+", str(size)).group()) | |
requests.append(request) | |
except Exception: | |
pass | |
# Overwrite them with the same requests from the logs | |
for request in requests: | |
for req in request_timings: | |
try: | |
- if 'claimed' not in req and 'url' in req and 'full_url' in request \ | |
- and 'start' in req and request['full_url'] == req['url']: | |
- req['claimed'] = True | |
+ if ( | |
+ "claimed" not in req | |
+ and "url" in req | |
+ and "full_url" in request | |
+ and "start" in req | |
+ and request["full_url"] == req["url"] | |
+ ): | |
+ req["claimed"] = True | |
self.populate_request(request, req) | |
except Exception: | |
pass | |
# Add any events from the logs that weren't reported by the extension | |
for req in request_timings: | |
try: | |
- if 'claimed' not in req and 'url' in req and 'start' in req: | |
- request = self.get_empty_request(req['id'], req['url']) | |
+ if "claimed" not in req and "url" in req and "start" in req: | |
+ request = self.get_empty_request(req["id"], req["url"]) | |
self.populate_request(request, req) | |
requests.append(request) | |
except Exception: | |
pass | |
# parse values out of the headers | |
for request in requests: | |
try: | |
- value = self.get_header_value(request['headers']['response'], 'Expires') | |
+ value = self.get_header_value(request["headers"]["response"], "Expires") | |
if value: | |
- request['expires'] = value | |
- value = self.get_header_value(request['headers']['response'], 'Cache-Control') | |
+ request["expires"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Cache-Control" | |
+ ) | |
if value: | |
- request['cacheControl'] = value | |
- value = self.get_header_value(request['headers']['response'], 'Content-Type') | |
+ request["cacheControl"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Content-Type" | |
+ ) | |
if value: | |
- request['contentType'] = value | |
- value = self.get_header_value(request['headers']['response'], 'Content-Encoding') | |
+ request["contentType"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Content-Encoding" | |
+ ) | |
if value: | |
- request['contentEncoding'] = value | |
- value = self.get_header_value(request['headers']['response'], 'Content-Length') | |
+ request["contentEncoding"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Content-Length" | |
+ ) | |
if value: | |
- request['objectSize'] = value | |
- except Exception: | |
- pass | |
- requests.sort(key=lambda x: x['load_start']) | |
+ request["objectSize"] = value | |
+ except Exception: | |
+ pass | |
+ requests.sort(key=lambda x: x["load_start"]) | |
return requests | |
def populate_request(self, request, log_request): | |
"""Populate a request object from the log request values""" | |
- request['load_start'] = int(log_request['start'] * 1000) | |
- if 'status' in log_request: | |
- request['responseCode'] = log_request['status'] | |
- if 'dns_start' in log_request and log_request['dns_start'] >= 0: | |
- request['dns_start'] = int(log_request['dns_start'] * 1000) | |
- if 'dns_end' in log_request and log_request['dns_end'] >= 0: | |
- request['dns_end'] = int(round(log_request['dns_end'] * 1000.0)) | |
- if 'connect_start' in log_request and log_request['connect_start'] >= 0: | |
- request['connect_start'] = int(log_request['connect_start'] * 1000) | |
- if 'connect_end' in log_request and log_request['connect_end'] >= 0: | |
- request['connect_end'] = int(round(log_request['connect_end'] * 1000.0)) | |
- if 'ssl_start' in log_request and log_request['ssl_start'] >= 0: | |
- request['ssl_start'] = int(log_request['ssl_start'] * 1000) | |
- if 'ssl_end' in log_request and log_request['ssl_end'] >= 0: | |
- request['ssl_end'] = int(round(log_request['ssl_end'] * 1000.0)) | |
- if 'connection' in log_request: | |
- request['socket'] = log_request['connection'] | |
- request['load_start'] = int(round(log_request['start'] * 1000.0)) | |
- if 'first_byte' in log_request: | |
- request['ttfb_ms'] = int(round((log_request['first_byte'] - | |
- log_request['start']) * 1000.0)) | |
- if 'end' in log_request: | |
- request['load_ms'] = int(round((log_request['end'] - | |
- log_request['start']) * 1000.0)) | |
- if 'bytes_in' in log_request: | |
- request['bytesIn'] = log_request['bytes_in'] | |
- if 'chunks' in log_request and len(log_request['chunks']): | |
- request['chunks'] = [] | |
- for chunk in log_request['chunks']: | |
- ts = chunk['ts'] * 1000.0 | |
- request['chunks'].append({'ts': ts, 'bytes': chunk['bytes']}) | |
- if 'request_headers' in log_request: | |
- request['headers']['request'] = list(log_request['request_headers']) | |
- if 'response_headers' in log_request: | |
- request['headers']['response'] = list(log_request['response_headers']) | |
- if 'http2_stream_id' in log_request: | |
- request['http2_stream_id'] = log_request['http2_stream_id'] | |
- request['protocol'] = 'HTTP/2' | |
- if 'http2_stream_dependency' in log_request: | |
- request['http2_stream_dependency'] = log_request['http2_stream_dependency'] | |
- if 'http2_stream_weight' in log_request: | |
- request['http2_stream_weight'] = log_request['http2_stream_weight'] | |
+ request["load_start"] = int(log_request["start"] * 1000) | |
+ if "status" in log_request: | |
+ request["responseCode"] = log_request["status"] | |
+ if "dns_start" in log_request and log_request["dns_start"] >= 0: | |
+ request["dns_start"] = int(log_request["dns_start"] * 1000) | |
+ if "dns_end" in log_request and log_request["dns_end"] >= 0: | |
+ request["dns_end"] = int(round(log_request["dns_end"] * 1000.0)) | |
+ if "connect_start" in log_request and log_request["connect_start"] >= 0: | |
+ request["connect_start"] = int(log_request["connect_start"] * 1000) | |
+ if "connect_end" in log_request and log_request["connect_end"] >= 0: | |
+ request["connect_end"] = int(round(log_request["connect_end"] * 1000.0)) | |
+ if "ssl_start" in log_request and log_request["ssl_start"] >= 0: | |
+ request["ssl_start"] = int(log_request["ssl_start"] * 1000) | |
+ if "ssl_end" in log_request and log_request["ssl_end"] >= 0: | |
+ request["ssl_end"] = int(round(log_request["ssl_end"] * 1000.0)) | |
+ if "connection" in log_request: | |
+ request["socket"] = log_request["connection"] | |
+ request["load_start"] = int(round(log_request["start"] * 1000.0)) | |
+ if "first_byte" in log_request: | |
+ request["ttfb_ms"] = int( | |
+ round((log_request["first_byte"] - log_request["start"]) * 1000.0) | |
+ ) | |
+ if "end" in log_request: | |
+ request["load_ms"] = int( | |
+ round((log_request["end"] - log_request["start"]) * 1000.0) | |
+ ) | |
+ if "bytes_in" in log_request: | |
+ request["bytesIn"] = log_request["bytes_in"] | |
+ if "chunks" in log_request and len(log_request["chunks"]): | |
+ request["chunks"] = [] | |
+ for chunk in log_request["chunks"]: | |
+ ts = chunk["ts"] * 1000.0 | |
+ request["chunks"].append({"ts": ts, "bytes": chunk["bytes"]}) | |
+ if "request_headers" in log_request: | |
+ request["headers"]["request"] = list(log_request["request_headers"]) | |
+ if "response_headers" in log_request: | |
+ request["headers"]["response"] = list(log_request["response_headers"]) | |
+ if "http2_stream_id" in log_request: | |
+ request["http2_stream_id"] = log_request["http2_stream_id"] | |
+ request["protocol"] = "HTTP/2" | |
+ if "http2_stream_dependency" in log_request: | |
+ request["http2_stream_dependency"] = log_request["http2_stream_dependency"] | |
+ if "http2_stream_weight" in log_request: | |
+ request["http2_stream_weight"] = log_request["http2_stream_weight"] | |
def calculate_page_stats(self, requests): | |
"""Calculate the page-level stats""" | |
- page = {'loadTime': 0, | |
- 'docTime': 0, | |
- 'fullyLoaded': 0, | |
- 'bytesOut': 0, | |
- 'bytesOutDoc': 0, | |
- 'bytesIn': 0, | |
- 'bytesInDoc': 0, | |
- 'requests': len(requests), | |
- 'requestsDoc': 0, | |
- 'responses_200': 0, | |
- 'responses_404': 0, | |
- 'responses_other': 0, | |
- 'result': 0, | |
- 'testStartOffset': 0, | |
- 'cached': 1 if self.task['cached'] else 0, | |
- 'optimization_checked': 0, | |
- 'start_epoch': int((self.task['start_time'] - | |
- datetime.utcfromtimestamp(0)).total_seconds()) | |
- } | |
- if 'loaded' in self.page: | |
- page['loadTime'] = int(round(self.page['loaded'] * 1000.0)) | |
- page['docTime'] = page['loadTime'] | |
- page['loadEventStart'] = page['loadTime'] | |
- page['loadEventEnd'] = page['loadTime'] | |
- if 'DOMContentLoaded' in self.page: | |
- page['domContentLoadedEventStart'] = int(round(self.page['DOMContentLoaded'] * 1000.0)) | |
- page['domContentLoadedEventEnd'] = page['domContentLoadedEventStart'] | |
+ page = { | |
+ "loadTime": 0, | |
+ "docTime": 0, | |
+ "fullyLoaded": 0, | |
+ "bytesOut": 0, | |
+ "bytesOutDoc": 0, | |
+ "bytesIn": 0, | |
+ "bytesInDoc": 0, | |
+ "requests": len(requests), | |
+ "requestsDoc": 0, | |
+ "responses_200": 0, | |
+ "responses_404": 0, | |
+ "responses_other": 0, | |
+ "result": 0, | |
+ "testStartOffset": 0, | |
+ "cached": 1 if self.task["cached"] else 0, | |
+ "optimization_checked": 0, | |
+ "start_epoch": int( | |
+ (self.task["start_time"] - datetime.utcfromtimestamp(0)).total_seconds() | |
+ ), | |
+ } | |
+ if "loaded" in self.page: | |
+ page["loadTime"] = int(round(self.page["loaded"] * 1000.0)) | |
+ page["docTime"] = page["loadTime"] | |
+ page["loadEventStart"] = page["loadTime"] | |
+ page["loadEventEnd"] = page["loadTime"] | |
+ if "DOMContentLoaded" in self.page: | |
+ page["domContentLoadedEventStart"] = int( | |
+ round(self.page["DOMContentLoaded"] * 1000.0) | |
+ ) | |
+ page["domContentLoadedEventEnd"] = page["domContentLoadedEventStart"] | |
main_request = None | |
index = 0 | |
for request in requests: | |
- if request['load_ms'] >= 0: | |
- end_time = request['load_start'] + request['load_ms'] | |
- if end_time > page['fullyLoaded']: | |
- page['fullyLoaded'] = end_time | |
- if end_time <= page['loadTime']: | |
- page['requestsDoc'] += 1 | |
- page['bytesInDoc'] += request['bytesIn'] | |
- page['bytesOutDoc'] += request['bytesOut'] | |
- page['bytesIn'] += request['bytesIn'] | |
- page['bytesOut'] += request['bytesOut'] | |
- if request['responseCode'] == 200: | |
- page['responses_200'] += 1 | |
- elif request['responseCode'] == 404: | |
- page['responses_404'] += 1 | |
- page['result'] = 99999 | |
- elif request['responseCode'] > -1: | |
- page['responses_other'] += 1 | |
- if main_request is None and \ | |
- (request['responseCode'] == 200 or request['responseCode'] == 304) and \ | |
- ('contentType' not in request or | |
- (request['contentType'] != 'application/ocsp-response' and | |
- request['contentType'] != 'application/pkix-crl')): | |
- main_request = request['id'] | |
- request['is_base_page'] = True | |
- page['final_base_page_request'] = index | |
- page['final_base_page_request_id'] = main_request | |
- page['final_url'] = request['full_url'] | |
- if 'URL' not in self.task['page_data']: | |
- self.task['page_data']['URL'] = page['final_url'] | |
- if request['ttfb_ms'] >= 0: | |
- page['TTFB'] = request['load_start'] + request['ttfb_ms'] | |
- if request['ssl_end'] >= request['ssl_start'] and \ | |
- request['ssl_start'] >= 0: | |
- page['basePageSSLTime'] = int(round(request['ssl_end'] - | |
- request['ssl_start'])) | |
- if page['responses_200'] == 0 and len(requests): | |
- if 'responseCode' in requests[0]: | |
- page['result'] = requests[0]['responseCode'] | |
+ if request["load_ms"] >= 0: | |
+ end_time = request["load_start"] + request["load_ms"] | |
+ if end_time > page["fullyLoaded"]: | |
+ page["fullyLoaded"] = end_time | |
+ if end_time <= page["loadTime"]: | |
+ page["requestsDoc"] += 1 | |
+ page["bytesInDoc"] += request["bytesIn"] | |
+ page["bytesOutDoc"] += request["bytesOut"] | |
+ page["bytesIn"] += request["bytesIn"] | |
+ page["bytesOut"] += request["bytesOut"] | |
+ if request["responseCode"] == 200: | |
+ page["responses_200"] += 1 | |
+ elif request["responseCode"] == 404: | |
+ page["responses_404"] += 1 | |
+ page["result"] = 99999 | |
+ elif request["responseCode"] > -1: | |
+ page["responses_other"] += 1 | |
+ if ( | |
+ main_request is None | |
+ and (request["responseCode"] == 200 or request["responseCode"] == 304) | |
+ and ( | |
+ "contentType" not in request | |
+ or ( | |
+ request["contentType"] != "application/ocsp-response" | |
+ and request["contentType"] != "application/pkix-crl" | |
+ ) | |
+ ) | |
+ ): | |
+ main_request = request["id"] | |
+ request["is_base_page"] = True | |
+ page["final_base_page_request"] = index | |
+ page["final_base_page_request_id"] = main_request | |
+ page["final_url"] = request["full_url"] | |
+ if "URL" not in self.task["page_data"]: | |
+ self.task["page_data"]["URL"] = page["final_url"] | |
+ if request["ttfb_ms"] >= 0: | |
+ page["TTFB"] = request["load_start"] + request["ttfb_ms"] | |
+ if ( | |
+ request["ssl_end"] >= request["ssl_start"] | |
+ and request["ssl_start"] >= 0 | |
+ ): | |
+ page["basePageSSLTime"] = int( | |
+ round(request["ssl_end"] - request["ssl_start"]) | |
+ ) | |
+ if page["responses_200"] == 0 and len(requests): | |
+ if "responseCode" in requests[0]: | |
+ page["result"] = requests[0]["responseCode"] | |
else: | |
- page['result'] = 12999 | |
- self.task['page_result'] = page['result'] | |
+ page["result"] = 12999 | |
+ self.task["page_result"] = page["result"] | |
return page | |
--- internal/support/firefox_log_parser.py 2018-11-07 20:41:27.845444 +0000 | |
+++ internal/support/firefox_log_parser.py 2019-02-06 17:08:28.417548 +0000 | |
@@ -19,29 +19,40 @@ | |
import logging | |
import os | |
import re | |
import urlparse | |
import monotonic | |
+ | |
try: | |
import ujson as json | |
except BaseException: | |
import json | |
+ | |
class FirefoxLogParser(object): | |
"""Handle parsing of firefox logs""" | |
+ | |
def __init__(self): | |
self.start_time = None | |
self.start_day = None | |
self.unique_id = 0 | |
self.int_map = {} | |
for val in xrange(0, 100): | |
- self.int_map['{0:02d}'.format(val)] = float(val) | |
+ self.int_map["{0:02d}".format(val)] = float(val) | |
self.dns = {} | |
- self.http = {'channels': {}, 'requests': {}, 'connections': {}, 'sockets': {}, 'streams': {}} | |
- self.logline = re.compile(r'^(?P<timestamp>\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d+) \w+ - ' | |
- r'\[(?P<thread>[^\]]+)\]: (?P<level>\w)/(?P<category>[^ ]+) ' | |
- r'(?P<message>[^\r\n]+)') | |
+ self.http = { | |
+ "channels": {}, | |
+ "requests": {}, | |
+ "connections": {}, | |
+ "sockets": {}, | |
+ "streams": {}, | |
+ } | |
+ self.logline = re.compile( | |
+ r"^(?P<timestamp>\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d+) \w+ - " | |
+ r"\[(?P<thread>[^\]]+)\]: (?P<level>\w)/(?P<category>[^ ]+) " | |
+ r"(?P<message>[^\r\n]+)" | |
+ ) | |
def set_start_time(self, timestamp): | |
"""Store the start time""" | |
self.start_day = int(timestamp[8:10]) | |
hour = int(timestamp[11:13]) | |
@@ -51,82 +62,91 @@ | |
self.start_time = float(hour * 3600 + minute * 60 + second) + usecond | |
def process_logs(self, log_file, start_time): | |
"""Process multiple child logs and generate a resulting requests and page data file""" | |
self.__init__() | |
- files = sorted(glob.glob(log_file + '*')) | |
+ files = sorted(glob.glob(log_file + "*")) | |
self.set_start_time(start_time) | |
for path in files: | |
try: | |
self.process_log_file(path) | |
except Exception: | |
pass | |
return self.finish_processing() | |
def finish_processing(self): | |
"""Do the post-parse processing""" | |
- logging.debug('Processing network requests from moz log') | |
+ logging.debug("Processing network requests from moz log") | |
# Pass the HTTP/2 stream information to the requests | |
- for stream_key in self.http['streams']: | |
- stream = self.http['streams'][stream_key] | |
- if 'request_id' in stream and stream['request_id'] in self.http['requests']: | |
- request = self.http['requests'][stream['request_id']] | |
- if 'stream_id' in stream: | |
- request['http2_stream_id'] = stream['stream_id'] | |
- if 'parent_stream_id' in stream: | |
- request['http2_stream_dependency'] = stream['parent_stream_id'] | |
- if 'weight' in stream: | |
- request['http2_stream_weight'] = stream['weight'] | |
+ for stream_key in self.http["streams"]: | |
+ stream = self.http["streams"][stream_key] | |
+ if "request_id" in stream and stream["request_id"] in self.http["requests"]: | |
+ request = self.http["requests"][stream["request_id"]] | |
+ if "stream_id" in stream: | |
+ request["http2_stream_id"] = stream["stream_id"] | |
+ if "parent_stream_id" in stream: | |
+ request["http2_stream_dependency"] = stream["parent_stream_id"] | |
+ if "weight" in stream: | |
+ request["http2_stream_weight"] = stream["weight"] | |
requests = [] | |
# Pull out the network requests and sort them | |
- for request_id in self.http['requests']: | |
- request = self.http['requests'][request_id] | |
- if 'url' in request and request['url'][0:22] != 'http://127.0.0.1:8888/'\ | |
- and 'start' in request: | |
- request['id'] = request_id | |
+ for request_id in self.http["requests"]: | |
+ request = self.http["requests"][request_id] | |
+ if ( | |
+ "url" in request | |
+ and request["url"][0:22] != "http://127.0.0.1:8888/" | |
+ and "start" in request | |
+ ): | |
+ request["id"] = request_id | |
requests.append(dict(request)) | |
if len(requests): | |
- requests.sort(key=lambda x: x['start'] if 'start' in x else 0) | |
+ requests.sort(key=lambda x: x["start"] if "start" in x else 0) | |
# Attach the DNS lookups to the first request on each domain | |
for domain in self.dns: | |
- if 'claimed' not in self.dns[domain]: | |
+ if "claimed" not in self.dns[domain]: | |
for request in requests: | |
- host = urlparse.urlsplit(request['url']).hostname | |
+ host = urlparse.urlsplit(request["url"]).hostname | |
if host == domain: | |
- self.dns[domain]['claimed'] = True | |
- if 'start' in self.dns[domain]: | |
- request['dns_start'] = self.dns[domain]['start'] | |
- if 'end' in self.dns[domain]: | |
- request['dns_end'] = self.dns[domain]['end'] | |
+ self.dns[domain]["claimed"] = True | |
+ if "start" in self.dns[domain]: | |
+ request["dns_start"] = self.dns[domain]["start"] | |
+ if "end" in self.dns[domain]: | |
+ request["dns_end"] = self.dns[domain]["end"] | |
break | |
# Attach the socket connect events to the first request on each connection | |
for request in requests: | |
- if 'connection' in request and request['connection'] in self.http['connections']: | |
- connection = self.http['connections'][request['connection']] | |
- if 'socket' in connection and connection['socket'] in self.http['sockets']: | |
- socket = self.http['sockets'][connection['socket']] | |
- if 'claimed' not in socket: | |
- socket['claimed'] = True | |
- if 'start' in socket: | |
- request['connect_start'] = socket['start'] | |
- if 'end' in socket: | |
- request['connect_end'] = socket['end'] | |
- if 'ssl_start' in connection and 'ssl_end' in connection: | |
- request['ssl_start'] = connection['ssl_start'] | |
- request['ssl_end'] = connection['ssl_end'] | |
+ if ( | |
+ "connection" in request | |
+ and request["connection"] in self.http["connections"] | |
+ ): | |
+ connection = self.http["connections"][request["connection"]] | |
+ if ( | |
+ "socket" in connection | |
+ and connection["socket"] in self.http["sockets"] | |
+ ): | |
+ socket = self.http["sockets"][connection["socket"]] | |
+ if "claimed" not in socket: | |
+ socket["claimed"] = True | |
+ if "start" in socket: | |
+ request["connect_start"] = socket["start"] | |
+ if "end" in socket: | |
+ request["connect_end"] = socket["end"] | |
+ if "ssl_start" in connection and "ssl_end" in connection: | |
+ request["ssl_start"] = connection["ssl_start"] | |
+ request["ssl_end"] = connection["ssl_end"] | |
return requests | |
def process_log_file(self, path): | |
"""Process a single log file""" | |
logging.debug("Processing %s", path) | |
start = monotonic.monotonic() | |
_, ext = os.path.splitext(path) | |
line_count = 0 | |
- if ext.lower() == '.gz': | |
- f_in = gzip.open(path, 'rb') | |
+ if ext.lower() == ".gz": | |
+ f_in = gzip.open(path, "rb") | |
else: | |
- f_in = open(path, 'r') | |
+ f_in = open(path, "r") | |
for line in f_in: | |
line_count += 1 | |
line = line.rstrip("\r\n") | |
self.process_log_line(line) | |
f_in.close() | |
@@ -143,328 +163,431 @@ | |
# %Y-%m-%d %H:%M:%S.%f - 2017-06-27 13:46:10.048844 | |
day = int_map[timestamp[8:10]] | |
hour = int_map[timestamp[11:13]] | |
minute = int_map[timestamp[14:16]] | |
second = int_map[timestamp[17:19]] | |
- usecond = int_map[timestamp[20:22]] * 10000 + \ | |
- int_map[timestamp[22:24]] * 100 + int_map[timestamp[24:26]] | |
- event_time = (hour * 3600.0 + minute * 60.0 + second) + (usecond / 1000000) | |
+ usecond = ( | |
+ int_map[timestamp[20:22]] * 10000 | |
+ + int_map[timestamp[22:24]] * 100 | |
+ + int_map[timestamp[24:26]] | |
+ ) | |
+ event_time = (hour * 3600.0 + minute * 60.0 + second) + ( | |
+ usecond / 1000000 | |
+ ) | |
if day == self.start_day: | |
elapsed = event_time - self.start_time | |
else: | |
elapsed = event_time + (float(3600 * 24) - self.start_time) | |
- msg['timestamp'] = elapsed | |
- if msg['timestamp'] >= 0: | |
- offset = line.find(']: ', 32) | |
+ msg["timestamp"] = elapsed | |
+ if msg["timestamp"] >= 0: | |
+ offset = line.find("]: ", 32) | |
if offset >= 0: | |
thread = line[34:offset] | |
- separator = thread.find(':') | |
+ separator = thread.find(":") | |
if separator >= 0: | |
- thread = thread[separator + 1:].strip() | |
- msg['thread'] = thread | |
- msg['level'] = line[offset + 3:offset + 4] | |
- msg_start = line.find(' ', offset + 5) | |
+ thread = thread[separator + 1 :].strip() | |
+ msg["thread"] = thread | |
+ msg["level"] = line[offset + 3 : offset + 4] | |
+ msg_start = line.find(" ", offset + 5) | |
if msg_start >= 0: | |
- msg['category'] = line[offset + 5:msg_start] | |
- msg['message'] = line[msg_start + 1:] | |
- if msg['category'] == 'nsHttp': | |
- if msg['thread'] == 'Main Thread': | |
+ msg["category"] = line[offset + 5 : msg_start] | |
+ msg["message"] = line[msg_start + 1 :] | |
+ if msg["category"] == "nsHttp": | |
+ if msg["thread"] == "Main Thread": | |
self.main_thread_http_entry(msg) | |
- elif msg['thread'] == 'Socket Thread': | |
+ elif msg["thread"] == "Socket Thread": | |
self.socket_thread_http_entry(msg) | |
- elif msg['category'] == 'nsSocketTransport': | |
+ elif msg["category"] == "nsSocketTransport": | |
self.socket_transport_entry(msg) | |
- elif msg['category'] == 'nsHostResolver': | |
+ elif msg["category"] == "nsHostResolver": | |
self.dns_entry(msg) | |
except Exception: | |
pass | |
def main_thread_http_entry(self, msg): | |
"""Process a single HTTP log line from the main thread""" | |
# V/nsHttp HttpBaseChannel::Init [this=c30d000] | |
- if msg['message'].startswith('HttpBaseChannel::Init'): | |
- match = re.search(r'^HttpBaseChannel::Init \[this=(?P<channel>[\w\d]+)]', | |
- msg['message']) | |
- if match: | |
- self.http['current_channel'] = match.groupdict().get('channel') | |
+ if msg["message"].startswith("HttpBaseChannel::Init"): | |
+ match = re.search( | |
+ r"^HttpBaseChannel::Init \[this=(?P<channel>[\w\d]+)]", msg["message"] | |
+ ) | |
+ if match: | |
+ self.http["current_channel"] = match.groupdict().get("channel") | |
# D/nsHttp nsHttpChannel::Init [this=c30d000] | |
- elif 'current_channel' in self.http and msg['message'].startswith('nsHttpChannel::Init'): | |
- del self.http['current_channel'] | |
+ elif "current_channel" in self.http and msg["message"].startswith( | |
+ "nsHttpChannel::Init" | |
+ ): | |
+ del self.http["current_channel"] | |
# V/nsHttp uri=http://www.webpagetest.org/?bare=1 | |
- elif 'current_channel' in self.http and msg['message'].startswith('uri='): | |
- match = re.search(r'^uri=(?P<url>[^ \r\n]+)', msg['message']) | |
- if match: | |
- self.http['channels'][self.http['current_channel']] = \ | |
- match.groupdict().get('url') | |
+ elif "current_channel" in self.http and msg["message"].startswith("uri="): | |
+ match = re.search(r"^uri=(?P<url>[^ \r\n]+)", msg["message"]) | |
+ if match: | |
+ self.http["channels"][ | |
+ self.http["current_channel"] | |
+ ] = match.groupdict().get("url") | |
# D/nsHttp nsHttpChannel c30d000 created nsHttpTransaction c138c00 | |
- elif msg['message'].startswith('nsHttpChannel') and \ | |
- msg['message'].find(' created nsHttpTransaction ') > -1: | |
- match = re.search(r'^nsHttpChannel (?P<channel>[\w\d]+) created '\ | |
- r'nsHttpTransaction (?P<id>[\w\d]+)', msg['message']) | |
- if match: | |
- channel = match.groupdict().get('channel') | |
- if channel in self.http['channels']: | |
- url = self.http['channels'][channel] | |
- del self.http['channels'][channel] | |
- trans_id = match.groupdict().get('id') | |
+ elif ( | |
+ msg["message"].startswith("nsHttpChannel") | |
+ and msg["message"].find(" created nsHttpTransaction ") > -1 | |
+ ): | |
+ match = re.search( | |
+ r"^nsHttpChannel (?P<channel>[\w\d]+) created " | |
+ r"nsHttpTransaction (?P<id>[\w\d]+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ channel = match.groupdict().get("channel") | |
+ if channel in self.http["channels"]: | |
+ url = self.http["channels"][channel] | |
+ del self.http["channels"][channel] | |
+ trans_id = match.groupdict().get("id") | |
# If there is already an existing transaction with the same ID, | |
# move it to a unique ID. | |
- if trans_id in self.http['requests']: | |
- tmp_request = self.http['requests'][trans_id] | |
- del self.http['requests'][trans_id] | |
+ if trans_id in self.http["requests"]: | |
+ tmp_request = self.http["requests"][trans_id] | |
+ del self.http["requests"][trans_id] | |
self.unique_id += 1 | |
- new_id = '{0}.{1:d}'.format(trans_id, self.unique_id) | |
- self.http['requests'][new_id] = tmp_request | |
- self.http['requests'][trans_id] = {'url': url, | |
- 'request_headers': [], | |
- 'response_headers': [], | |
- 'status': None, | |
- 'bytes_in': 0, | |
- 'chunks': []} | |
+ new_id = "{0}.{1:d}".format(trans_id, self.unique_id) | |
+ self.http["requests"][new_id] = tmp_request | |
+ self.http["requests"][trans_id] = { | |
+ "url": url, | |
+ "request_headers": [], | |
+ "response_headers": [], | |
+ "status": None, | |
+ "bytes_in": 0, | |
+ "chunks": [], | |
+ } | |
# D/nsHttp nsHttpTransaction::Init [this=c138c00 caps=21] | |
- elif msg['message'].startswith('nsHttpTransaction::Init '): | |
- match = re.search(r'^nsHttpTransaction::Init \[this=(?P<id>[\w\d]+)', msg['message']) | |
- if match: | |
- trans_id = match.groupdict().get('id') | |
- self.http['current_transaction'] = trans_id | |
+ elif msg["message"].startswith("nsHttpTransaction::Init "): | |
+ match = re.search( | |
+ r"^nsHttpTransaction::Init \[this=(?P<id>[\w\d]+)", msg["message"] | |
+ ) | |
+ if match: | |
+ trans_id = match.groupdict().get("id") | |
+ self.http["current_transaction"] = trans_id | |
# D/nsHttp nsHttpTransaction c138c00 SetRequestContext c15ba00 | |
- elif 'current_transaction' in self.http and \ | |
- msg['message'].startswith('nsHttpTransaction ') and \ | |
- msg['message'].find(' SetRequestContext ') > -1: | |
- del self.http['current_transaction'] | |
+ elif ( | |
+ "current_transaction" in self.http | |
+ and msg["message"].startswith("nsHttpTransaction ") | |
+ and msg["message"].find(" SetRequestContext ") > -1 | |
+ ): | |
+ del self.http["current_transaction"] | |
# I/nsHttp http request [ | |
- elif 'current_transaction' in self.http and msg['message'] == 'http request [': | |
- self.http['request_headers'] = self.http['current_transaction'] | |
- elif 'request_headers' in self.http and msg['message'] == ']': | |
- del self.http['request_headers'] | |
+ elif "current_transaction" in self.http and msg["message"] == "http request [": | |
+ self.http["request_headers"] = self.http["current_transaction"] | |
+ elif "request_headers" in self.http and msg["message"] == "]": | |
+ del self.http["request_headers"] | |
# Individual request headers | |
- elif 'request_headers' in self.http and msg['message'][0:2] == ' ': | |
- trans_id = self.http['request_headers'] | |
- if trans_id in self.http['requests']: | |
- self.http['requests'][trans_id]['request_headers'].append(msg['message'][2:]) | |
+ elif "request_headers" in self.http and msg["message"][0:2] == " ": | |
+ trans_id = self.http["request_headers"] | |
+ if trans_id in self.http["requests"]: | |
+ self.http["requests"][trans_id]["request_headers"].append( | |
+ msg["message"][2:] | |
+ ) | |
def socket_thread_http_entry(self, msg): | |
"""Process a single HTTP log line from the socket thread""" | |
# V/nsHttp nsHttpConnection::Activate [this=ed6c450 trans=143f3c00 caps=21] | |
- if msg['message'].startswith('nsHttpConnection::Activate '): | |
- match = re.search(r'^nsHttpConnection::Activate \[' | |
- r'this=(?P<connection>[\w\d]+) ' | |
- r'trans=(?P<id>[\w\d]+)', msg['message']) | |
- if match: | |
- connection = match.groupdict().get('connection') | |
- trans_id = match.groupdict().get('id') | |
- if trans_id in self.http['requests']: | |
- self.http['requests'][trans_id]['connection'] = connection | |
+ if msg["message"].startswith("nsHttpConnection::Activate "): | |
+ match = re.search( | |
+ r"^nsHttpConnection::Activate \[" | |
+ r"this=(?P<connection>[\w\d]+) " | |
+ r"trans=(?P<id>[\w\d]+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ connection = match.groupdict().get("connection") | |
+ trans_id = match.groupdict().get("id") | |
+ if trans_id in self.http["requests"]: | |
+ self.http["requests"][trans_id]["connection"] = connection | |
# V/nsHttp nsHttpConnection::Init this=ed6c450 | |
- elif msg['message'].startswith('nsHttpConnection::Init ') and \ | |
- 'current_socket' in self.http: | |
- match = re.search(r'^nsHttpConnection::Init ' | |
- r'this=(?P<connection>[\w\d]+)', msg['message']) | |
- if match: | |
- connection = match.groupdict().get('connection') | |
- socket = self.http['current_socket'] | |
- self.http['connections'][connection] = {'socket': socket} | |
- del self.http['current_socket'] | |
- elif msg['message'].startswith('nsHttpConnection::SetupSSL '): | |
- match = re.search(r'^nsHttpConnection::SetupSSL (?P<connection>[\w\d]+)', | |
- msg['message']) | |
- if match: | |
- connection = match.groupdict().get('connection') | |
- if connection in self.http['connections']: | |
- if 'ssl_start' not in self.http['connections'][connection]: | |
- self.http['connections'][connection]['ssl_start'] = msg['timestamp'] | |
- elif msg['message'].startswith('nsHttpConnection::EnsureNPNComplete '): | |
- match = re.search(r'^nsHttpConnection::EnsureNPNComplete (?P<connection>[\w\d]+)', | |
- msg['message']) | |
- if match: | |
- connection = match.groupdict().get('connection') | |
- if connection in self.http['connections']: | |
- if 'ssl_start' in self.http['connections'][connection]: | |
- self.http['connections'][connection]['ssl_end'] = msg['timestamp'] | |
- elif msg['message'].startswith('nsHttpTransaction::OnTransportStatus ') and \ | |
- msg['message'].find(' SENDING_TO ') > -1: | |
- match = re.search(r'^nsHttpTransaction::OnTransportStatus (?P<id>[\w\d]+) SENDING_TO ', | |
- msg['message']) | |
- if match: | |
- trans_id = match.groupdict().get('id') | |
- if trans_id in self.http['requests'] and \ | |
- 'start' not in self.http['requests'][trans_id]: | |
- self.http['requests'][trans_id]['start'] = msg['timestamp'] | |
- elif msg['message'].startswith('nsHttpTransaction::OnSocketStatus ') and \ | |
- msg['message'].find(' status=804b0005 progress=') > -1: | |
- match = re.search(r'^nsHttpTransaction::OnSocketStatus '\ | |
- r'\[this=(?P<id>[\w\d]+) status=804b0005 progress=(?P<bytes>[\d+]+)', | |
- msg['message']) | |
- if match: | |
- trans_id = match.groupdict().get('id') | |
- byte_count = int(match.groupdict().get('bytes')) | |
- if byte_count > 0 and trans_id in self.http['requests'] and \ | |
- 'start' not in self.http['requests'][trans_id]: | |
- self.http['requests'][trans_id]['start'] = msg['timestamp'] | |
- elif msg['message'].startswith('nsHttpTransaction::ProcessData '): | |
- match = re.search(r'^nsHttpTransaction::ProcessData \[this=(?P<id>[\w\d]+)', | |
- msg['message']) | |
- if match: | |
- trans_id = match.groupdict().get('id') | |
- self.http['current_socket_transaction'] = trans_id | |
- elif msg['message'].startswith('nsHttpTransaction::HandleContent '): | |
- if 'current_socket_transaction' in self.http: | |
- del self.http['current_socket_transaction'] | |
- match = re.search(r'^nsHttpTransaction::HandleContent \[' | |
- r'this=(?P<id>[\w\d]+) ' | |
- r'count=(?P<len>[\d]+) read=', msg['message']) | |
- if match: | |
- trans_id = match.groupdict().get('id') | |
- if trans_id in self.http['requests']: | |
- bytes_in = int(match.groupdict().get('len')) | |
- if 'first_byte' not in self.http['requests'][trans_id]: | |
- self.http['requests'][trans_id]['first_byte'] = msg['timestamp'] | |
- if 'end' not in self.http['requests'][trans_id] or \ | |
- msg['timestamp'] > self.http['requests'][trans_id]['end']: | |
- self.http['requests'][trans_id]['end'] = msg['timestamp'] | |
- self.http['requests'][trans_id]['bytes_in'] += bytes_in | |
- self.http['requests'][trans_id]['chunks'].append(\ | |
- {'ts': msg['timestamp'], 'bytes': bytes_in}) | |
- elif msg['message'].startswith('Http2Stream::Http2Stream '): | |
- match = re.search(r'^Http2Stream::Http2Stream ' | |
- r'(?P<stream>[\w\d]+) ' | |
- r'trans=(?P<id>[\w\d]+) ', msg['message']) | |
- if match: | |
- stream = match.groupdict().get('stream') | |
- trans_id = match.groupdict().get('id') | |
- if stream not in self.http['streams']: | |
- self.http['streams'][stream] = {} | |
- if 'trans_id' not in self.http['streams'][stream]: | |
- self.http['streams'][stream]['request_id'] = trans_id | |
- elif msg['message'].startswith('Http2Session::RegisterStreamID '): | |
- match = re.search(r'^Http2Session::RegisterStreamID ' | |
- r'session=[\w\d]+ ' | |
- r'stream=(?P<stream>[\w\d]+) ' | |
- r'id=(?P<id>0x[\w\d]+) ', msg['message']) | |
- if match: | |
- stream = match.groupdict().get('stream') | |
- stream_id = int(match.groupdict().get('id'), 16) | |
- if stream in self.http['streams']: | |
- self.http['streams'][stream]['stream_id'] = stream_id | |
- elif msg['message'].startswith('Http2Stream::UpdatePriorityDependency '): | |
- match = re.search(r'^Http2Stream::UpdatePriorityDependency ' | |
- r'(?P<stream>[\w\d]+) ' | |
- r'depends on stream (?P<parent>0x[\w\d]+) ', msg['message']) | |
- if match: | |
- stream = match.groupdict().get('stream') | |
- parent_id = int(match.groupdict().get('parent'), 16) | |
- if stream in self.http['streams']: | |
- self.http['streams'][stream]['parent_stream_id'] = parent_id | |
- elif msg['message'].startswith('Http2Stream '): | |
- match = re.search(r'^Http2Stream ' | |
- r'(?P<stream>[\w\d]+) ' | |
- r'Generating [\d]+ bytes of HEADERS for ' | |
- r'stream (?P<id>0x[\w\d]+) ' | |
- r'with priority weight (?P<weight>[\d]+) ' | |
- r'dep (?P<parent>0x[\w\d]+) ', msg['message']) | |
- if match: | |
- stream = match.groupdict().get('stream') | |
- stream_id = int(match.groupdict().get('id'), 16) | |
- weight = int(match.groupdict().get('weight'), 10) | |
- parent_id = int(match.groupdict().get('parent'), 16) | |
- if stream in self.http['streams']: | |
- self.http['streams'][stream]['stream_id'] = stream_id | |
- self.http['streams'][stream]['weight'] = weight | |
- self.http['streams'][stream]['parent_stream_id'] = parent_id | |
- elif 'current_socket_transaction' in self.http and \ | |
- msg['message'].startswith('nsHttpTransaction::ParseLine '): | |
- trans_id = self.http['current_socket_transaction'] | |
- if trans_id in self.http['requests']: | |
- if trans_id in self.http['requests']: | |
- if 'first_byte' not in self.http['requests'][trans_id]: | |
- self.http['requests'][trans_id]['first_byte'] = msg['timestamp'] | |
- if 'end' not in self.http['requests'][trans_id] or \ | |
- msg['timestamp'] > self.http['requests'][trans_id]['end']: | |
- self.http['requests'][trans_id]['end'] = msg['timestamp'] | |
- match = re.search(r'^nsHttpTransaction::ParseLine \[(?P<line>.*)\]\s*$', | |
- msg['message']) | |
+ elif ( | |
+ msg["message"].startswith("nsHttpConnection::Init ") | |
+ and "current_socket" in self.http | |
+ ): | |
+ match = re.search( | |
+ r"^nsHttpConnection::Init " r"this=(?P<connection>[\w\d]+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ connection = match.groupdict().get("connection") | |
+ socket = self.http["current_socket"] | |
+ self.http["connections"][connection] = {"socket": socket} | |
+ del self.http["current_socket"] | |
+ elif msg["message"].startswith("nsHttpConnection::SetupSSL "): | |
+ match = re.search( | |
+ r"^nsHttpConnection::SetupSSL (?P<connection>[\w\d]+)", msg["message"] | |
+ ) | |
+ if match: | |
+ connection = match.groupdict().get("connection") | |
+ if connection in self.http["connections"]: | |
+ if "ssl_start" not in self.http["connections"][connection]: | |
+ self.http["connections"][connection]["ssl_start"] = msg[ | |
+ "timestamp" | |
+ ] | |
+ elif msg["message"].startswith("nsHttpConnection::EnsureNPNComplete "): | |
+ match = re.search( | |
+ r"^nsHttpConnection::EnsureNPNComplete (?P<connection>[\w\d]+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ connection = match.groupdict().get("connection") | |
+ if connection in self.http["connections"]: | |
+ if "ssl_start" in self.http["connections"][connection]: | |
+ self.http["connections"][connection]["ssl_end"] = msg[ | |
+ "timestamp" | |
+ ] | |
+ elif ( | |
+ msg["message"].startswith("nsHttpTransaction::OnTransportStatus ") | |
+ and msg["message"].find(" SENDING_TO ") > -1 | |
+ ): | |
+ match = re.search( | |
+ r"^nsHttpTransaction::OnTransportStatus (?P<id>[\w\d]+) SENDING_TO ", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ trans_id = match.groupdict().get("id") | |
+ if ( | |
+ trans_id in self.http["requests"] | |
+ and "start" not in self.http["requests"][trans_id] | |
+ ): | |
+ self.http["requests"][trans_id]["start"] = msg["timestamp"] | |
+ elif ( | |
+ msg["message"].startswith("nsHttpTransaction::OnSocketStatus ") | |
+ and msg["message"].find(" status=804b0005 progress=") > -1 | |
+ ): | |
+ match = re.search( | |
+ r"^nsHttpTransaction::OnSocketStatus " | |
+ r"\[this=(?P<id>[\w\d]+) status=804b0005 progress=(?P<bytes>[\d+]+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ trans_id = match.groupdict().get("id") | |
+ byte_count = int(match.groupdict().get("bytes")) | |
+ if ( | |
+ byte_count > 0 | |
+ and trans_id in self.http["requests"] | |
+ and "start" not in self.http["requests"][trans_id] | |
+ ): | |
+ self.http["requests"][trans_id]["start"] = msg["timestamp"] | |
+ elif msg["message"].startswith("nsHttpTransaction::ProcessData "): | |
+ match = re.search( | |
+ r"^nsHttpTransaction::ProcessData \[this=(?P<id>[\w\d]+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ trans_id = match.groupdict().get("id") | |
+ self.http["current_socket_transaction"] = trans_id | |
+ elif msg["message"].startswith("nsHttpTransaction::HandleContent "): | |
+ if "current_socket_transaction" in self.http: | |
+ del self.http["current_socket_transaction"] | |
+ match = re.search( | |
+ r"^nsHttpTransaction::HandleContent \[" | |
+ r"this=(?P<id>[\w\d]+) " | |
+ r"count=(?P<len>[\d]+) read=", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ trans_id = match.groupdict().get("id") | |
+ if trans_id in self.http["requests"]: | |
+ bytes_in = int(match.groupdict().get("len")) | |
+ if "first_byte" not in self.http["requests"][trans_id]: | |
+ self.http["requests"][trans_id]["first_byte"] = msg["timestamp"] | |
+ if ( | |
+ "end" not in self.http["requests"][trans_id] | |
+ or msg["timestamp"] > self.http["requests"][trans_id]["end"] | |
+ ): | |
+ self.http["requests"][trans_id]["end"] = msg["timestamp"] | |
+ self.http["requests"][trans_id]["bytes_in"] += bytes_in | |
+ self.http["requests"][trans_id]["chunks"].append( | |
+ {"ts": msg["timestamp"], "bytes": bytes_in} | |
+ ) | |
+ elif msg["message"].startswith("Http2Stream::Http2Stream "): | |
+ match = re.search( | |
+ r"^Http2Stream::Http2Stream " | |
+ r"(?P<stream>[\w\d]+) " | |
+ r"trans=(?P<id>[\w\d]+) ", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ stream = match.groupdict().get("stream") | |
+ trans_id = match.groupdict().get("id") | |
+ if stream not in self.http["streams"]: | |
+ self.http["streams"][stream] = {} | |
+ if "trans_id" not in self.http["streams"][stream]: | |
+ self.http["streams"][stream]["request_id"] = trans_id | |
+ elif msg["message"].startswith("Http2Session::RegisterStreamID "): | |
+ match = re.search( | |
+ r"^Http2Session::RegisterStreamID " | |
+ r"session=[\w\d]+ " | |
+ r"stream=(?P<stream>[\w\d]+) " | |
+ r"id=(?P<id>0x[\w\d]+) ", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ stream = match.groupdict().get("stream") | |
+ stream_id = int(match.groupdict().get("id"), 16) | |
+ if stream in self.http["streams"]: | |
+ self.http["streams"][stream]["stream_id"] = stream_id | |
+ elif msg["message"].startswith("Http2Stream::UpdatePriorityDependency "): | |
+ match = re.search( | |
+ r"^Http2Stream::UpdatePriorityDependency " | |
+ r"(?P<stream>[\w\d]+) " | |
+ r"depends on stream (?P<parent>0x[\w\d]+) ", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ stream = match.groupdict().get("stream") | |
+ parent_id = int(match.groupdict().get("parent"), 16) | |
+ if stream in self.http["streams"]: | |
+ self.http["streams"][stream]["parent_stream_id"] = parent_id | |
+ elif msg["message"].startswith("Http2Stream "): | |
+ match = re.search( | |
+ r"^Http2Stream " | |
+ r"(?P<stream>[\w\d]+) " | |
+ r"Generating [\d]+ bytes of HEADERS for " | |
+ r"stream (?P<id>0x[\w\d]+) " | |
+ r"with priority weight (?P<weight>[\d]+) " | |
+ r"dep (?P<parent>0x[\w\d]+) ", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ stream = match.groupdict().get("stream") | |
+ stream_id = int(match.groupdict().get("id"), 16) | |
+ weight = int(match.groupdict().get("weight"), 10) | |
+ parent_id = int(match.groupdict().get("parent"), 16) | |
+ if stream in self.http["streams"]: | |
+ self.http["streams"][stream]["stream_id"] = stream_id | |
+ self.http["streams"][stream]["weight"] = weight | |
+ self.http["streams"][stream]["parent_stream_id"] = parent_id | |
+ elif "current_socket_transaction" in self.http and msg["message"].startswith( | |
+ "nsHttpTransaction::ParseLine " | |
+ ): | |
+ trans_id = self.http["current_socket_transaction"] | |
+ if trans_id in self.http["requests"]: | |
+ if trans_id in self.http["requests"]: | |
+ if "first_byte" not in self.http["requests"][trans_id]: | |
+ self.http["requests"][trans_id]["first_byte"] = msg["timestamp"] | |
+ if ( | |
+ "end" not in self.http["requests"][trans_id] | |
+ or msg["timestamp"] > self.http["requests"][trans_id]["end"] | |
+ ): | |
+ self.http["requests"][trans_id]["end"] = msg["timestamp"] | |
+ match = re.search( | |
+ r"^nsHttpTransaction::ParseLine \[(?P<line>.*)\]\s*$", | |
+ msg["message"], | |
+ ) | |
if match: | |
- line = match.groupdict().get('line') | |
- self.http['requests'][trans_id]['response_headers'].append(line) | |
- elif 'current_socket_transaction' in self.http and \ | |
- msg['message'].startswith('Have status line '): | |
- trans_id = self.http['current_socket_transaction'] | |
- if trans_id in self.http['requests']: | |
- if trans_id in self.http['requests']: | |
- if 'first_byte' not in self.http['requests'][trans_id]: | |
- self.http['requests'][trans_id]['first_byte'] = msg['timestamp'] | |
- if 'end' not in self.http['requests'][trans_id] or \ | |
- msg['timestamp'] > self.http['requests'][trans_id]['end']: | |
- self.http['requests'][trans_id]['end'] = msg['timestamp'] | |
- match = re.search(r'^Have status line \[[^\]]*status=(?P<status>\d+)', | |
- msg['message']) | |
+ line = match.groupdict().get("line") | |
+ self.http["requests"][trans_id]["response_headers"].append(line) | |
+ elif "current_socket_transaction" in self.http and msg["message"].startswith( | |
+ "Have status line " | |
+ ): | |
+ trans_id = self.http["current_socket_transaction"] | |
+ if trans_id in self.http["requests"]: | |
+ if trans_id in self.http["requests"]: | |
+ if "first_byte" not in self.http["requests"][trans_id]: | |
+ self.http["requests"][trans_id]["first_byte"] = msg["timestamp"] | |
+ if ( | |
+ "end" not in self.http["requests"][trans_id] | |
+ or msg["timestamp"] > self.http["requests"][trans_id]["end"] | |
+ ): | |
+ self.http["requests"][trans_id]["end"] = msg["timestamp"] | |
+ match = re.search( | |
+ r"^Have status line \[[^\]]*status=(?P<status>\d+)", msg["message"] | |
+ ) | |
if match: | |
- status = int(match.groupdict().get('status')) | |
- self.http['requests'][trans_id]['status'] = status | |
+ status = int(match.groupdict().get("status")) | |
+ self.http["requests"][trans_id]["status"] = status | |
def socket_transport_entry(self, msg): | |
"""Process a single socket transport line""" | |
# nsSocketTransport::Init [this=143f4000 host=www.webpagetest.org:80 origin=www.webpagetest.org:80 proxy=:0] | |
- if msg['message'].startswith('nsSocketTransport::Init '): | |
- match = re.search(r'^nsSocketTransport::Init \[' | |
- r'this=(?P<socket>[\w\d]+) ' | |
- r'host=(?P<host>[^ :]+):(?P<port>\d+)', msg['message']) | |
- if match: | |
- socket = match.groupdict().get('socket') | |
- host = match.groupdict().get('host') | |
- port = match.groupdict().get('port') | |
- self.http['sockets'][socket] = {'host': host, 'port': port} | |
+ if msg["message"].startswith("nsSocketTransport::Init "): | |
+ match = re.search( | |
+ r"^nsSocketTransport::Init \[" | |
+ r"this=(?P<socket>[\w\d]+) " | |
+ r"host=(?P<host>[^ :]+):(?P<port>\d+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ socket = match.groupdict().get("socket") | |
+ host = match.groupdict().get("host") | |
+ port = match.groupdict().get("port") | |
+ self.http["sockets"][socket] = {"host": host, "port": port} | |
# nsSocketTransport::SendStatus [this=143f4000 status=804b0007] | |
- elif msg['message'].startswith('nsSocketTransport::SendStatus '): | |
- match = re.search(r'^nsSocketTransport::SendStatus \[' | |
- r'this=(?P<socket>[\w\d]+) ' | |
- r'status=(?P<status>[\w\d]+)', msg['message']) | |
- if match: | |
- socket = match.groupdict().get('socket') | |
- status = match.groupdict().get('status') | |
- if status == '804b0007': | |
- if socket not in self.http['sockets']: | |
- self.http['sockets'][socket] = {} | |
- if 'start' not in self.http['sockets'][socket]: | |
- self.http['sockets'][socket]['start'] = msg['timestamp'] | |
+ elif msg["message"].startswith("nsSocketTransport::SendStatus "): | |
+ match = re.search( | |
+ r"^nsSocketTransport::SendStatus \[" | |
+ r"this=(?P<socket>[\w\d]+) " | |
+ r"status=(?P<status>[\w\d]+)", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ socket = match.groupdict().get("socket") | |
+ status = match.groupdict().get("status") | |
+ if status == "804b0007": | |
+ if socket not in self.http["sockets"]: | |
+ self.http["sockets"][socket] = {} | |
+ if "start" not in self.http["sockets"][socket]: | |
+ self.http["sockets"][socket]["start"] = msg["timestamp"] | |
# nsSocketTransport::OnSocketReady [this=143f4000 outFlags=2] | |
- elif msg['message'].startswith('nsSocketTransport::OnSocketReady '): | |
- match = re.search(r'^nsSocketTransport::OnSocketReady \[' | |
- r'this=(?P<socket>[\w\d]+) ', msg['message']) | |
- if match: | |
- socket = match.groupdict().get('socket') | |
- self.http['current_socket'] = socket | |
- if socket in self.http['sockets'] and 'end' not in self.http['sockets'][socket]: | |
- self.http['sockets'][socket]['end'] = msg['timestamp'] | |
+ elif msg["message"].startswith("nsSocketTransport::OnSocketReady "): | |
+ match = re.search( | |
+ r"^nsSocketTransport::OnSocketReady \[" r"this=(?P<socket>[\w\d]+) ", | |
+ msg["message"], | |
+ ) | |
+ if match: | |
+ socket = match.groupdict().get("socket") | |
+ self.http["current_socket"] = socket | |
+ if ( | |
+ socket in self.http["sockets"] | |
+ and "end" not in self.http["sockets"][socket] | |
+ ): | |
+ self.http["sockets"][socket]["end"] = msg["timestamp"] | |
def dns_entry(self, msg): | |
"""Process a single DNS log line""" | |
- if msg['message'].find('Calling getaddrinfo') > -1: | |
- match = re.search(r'Calling getaddrinfo for host \[(?P<host>[^\]]+)\]', msg['message']) | |
- if match: | |
- hostname = match.groupdict().get('host') | |
+ if msg["message"].find("Calling getaddrinfo") > -1: | |
+ match = re.search( | |
+ r"Calling getaddrinfo for host \[(?P<host>[^\]]+)\]", msg["message"] | |
+ ) | |
+ if match: | |
+ hostname = match.groupdict().get("host") | |
if hostname not in self.dns: | |
- self.dns[hostname] = {'start': msg['timestamp']} | |
- elif msg['message'].find('lookup completed for host') > -1: | |
- match = re.search(r'lookup completed for host \[(?P<host>[^\]]+)\]', msg['message']) | |
- if match: | |
- hostname = match.groupdict().get('host') | |
- if hostname in self.dns and 'end' not in self.dns[hostname]: | |
- self.dns[hostname]['end'] = msg['timestamp'] | |
+ self.dns[hostname] = {"start": msg["timestamp"]} | |
+ elif msg["message"].find("lookup completed for host") > -1: | |
+ match = re.search( | |
+ r"lookup completed for host \[(?P<host>[^\]]+)\]", msg["message"] | |
+ ) | |
+ if match: | |
+ hostname = match.groupdict().get("host") | |
+ if hostname in self.dns and "end" not in self.dns[hostname]: | |
+ self.dns[hostname]["end"] = msg["timestamp"] | |
+ | |
def main(): | |
""" Main entry-point when running on the command-line""" | |
import argparse | |
- parser = argparse.ArgumentParser(description='Chrome trace parser.', | |
- prog='trace-parser') | |
- parser.add_argument('-v', '--verbose', action='count', | |
- help="Increase verbosity (specify multiple times for more)" \ | |
- ". -vvvv for full debug output.") | |
- parser.add_argument('-l', '--logfile', help="File name for the mozilla log.") | |
- parser.add_argument('-s', '--start', | |
- help="Start Time in UTC with microseconds YYYY-MM-DD HH:MM:SS.xxxxxx.") | |
- parser.add_argument('-o', '--out', help="Output requests json file.") | |
+ | |
+ parser = argparse.ArgumentParser( | |
+ description="Chrome trace parser.", prog="trace-parser" | |
+ ) | |
+ parser.add_argument( | |
+ "-v", | |
+ "--verbose", | |
+ action="count", | |
+ help="Increase verbosity (specify multiple times for more)" | |
+ ". -vvvv for full debug output.", | |
+ ) | |
+ parser.add_argument("-l", "--logfile", help="File name for the mozilla log.") | |
+ parser.add_argument( | |
+ "-s", | |
+ "--start", | |
+ help="Start Time in UTC with microseconds YYYY-MM-DD HH:MM:SS.xxxxxx.", | |
+ ) | |
+ parser.add_argument("-o", "--out", help="Output requests json file.") | |
options, _ = parser.parse_known_args() | |
# Set up logging | |
log_level = logging.CRITICAL | |
if options.verbose == 1: | |
@@ -474,21 +597,25 @@ | |
elif options.verbose == 3: | |
log_level = logging.INFO | |
elif options.verbose >= 4: | |
log_level = logging.DEBUG | |
logging.basicConfig( | |
- level=log_level, format="%(asctime)s.%(msecs)03d - %(message)s", datefmt="%H:%M:%S") | |
+ level=log_level, | |
+ format="%(asctime)s.%(msecs)03d - %(message)s", | |
+ datefmt="%H:%M:%S", | |
+ ) | |
if not options.logfile or not options.start: | |
parser.error("Input devtools file or start time is not specified.") | |
parser = FirefoxLogParser() | |
requests = parser.process_logs(options.logfile, options.start) | |
if options.out: | |
- with open(options.out, 'w') as f_out: | |
+ with open(options.out, "w") as f_out: | |
json.dump(requests, f_out, indent=4) | |
-if __name__ == '__main__': | |
- #import cProfile | |
- #cProfile.run('main()', None, 2) | |
+ | |
+if __name__ == "__main__": | |
+ # import cProfile | |
+ # cProfile.run('main()', None, 2) | |
main() | |
--- internal/devtools.py 2019-02-06 16:55:03.352028 +0000 | |
+++ internal/devtools.py 2019-02-06 17:08:28.497516 +0000 | |
@@ -17,12 +17,13 @@ | |
from ws4py.client.threadedclient import WebSocketClient | |
class DevTools(object): | |
"""Interface into Chrome's remote dev tools protocol""" | |
+ | |
def __init__(self, options, job, task, use_devtools_video): | |
- self.url = "http://localhost:{0:d}/json".format(task['port']) | |
+ self.url = "http://localhost:{0:d}/json".format(task["port"]) | |
self.websocket = None | |
self.options = options | |
self.job = job | |
self.task = task | |
self.command_id = 0 | |
@@ -71,26 +72,30 @@ | |
self.nav_error_code = None | |
self.main_request = None | |
self.main_request_headers = None | |
self.response_started = False | |
self.start_timestamp = None | |
- self.path_base = os.path.join(self.task['dir'], self.task['prefix']) | |
- self.support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
- self.video_path = os.path.join(self.task['dir'], self.task['video_subdirectory']) | |
- self.video_prefix = os.path.join(self.video_path, 'ms_') | |
+ self.path_base = os.path.join(self.task["dir"], self.task["prefix"]) | |
+ self.support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
+ self.video_path = os.path.join( | |
+ self.task["dir"], self.task["video_subdirectory"] | |
+ ) | |
+ self.video_prefix = os.path.join(self.video_path, "ms_") | |
if not os.path.isdir(self.video_path): | |
os.makedirs(self.video_path) | |
self.body_fail_count = 0 | |
self.body_index = 0 | |
if self.bodies_zip_file is not None: | |
self.bodies_zip_file.close() | |
self.bodies_zip_file = None | |
self.html_body = False | |
self.all_bodies = False | |
- if 'bodies' in self.job and self.job['bodies']: | |
+ if "bodies" in self.job and self.job["bodies"]: | |
self.all_bodies = True | |
- if 'htmlbody' in self.job and self.job['htmlbody']: | |
+ if "htmlbody" in self.job and self.job["htmlbody"]: | |
self.html_body = True | |
def start_navigating(self): | |
"""Indicate that we are about to start a known-navigation""" | |
self.main_frame = None | |
@@ -98,10 +103,11 @@ | |
self.response_started = False | |
def wait_for_available(self, timeout): | |
"""Wait for the dev tools interface to become available (but don't connect)""" | |
import requests | |
+ | |
proxies = {"http": None, "https": None} | |
ret = False | |
end_time = monotonic.monotonic() + timeout | |
while not ret and monotonic.monotonic() < end_time: | |
try: | |
@@ -109,24 +115,27 @@ | |
if len(response.text): | |
tabs = response.json() | |
logging.debug("Dev Tools tabs: %s", json.dumps(tabs)) | |
if len(tabs): | |
for index in xrange(len(tabs)): | |
- if 'type' in tabs[index] and \ | |
- tabs[index]['type'] == 'page' and \ | |
- 'webSocketDebuggerUrl' in tabs[index] and \ | |
- 'id' in tabs[index]: | |
+ if ( | |
+ "type" in tabs[index] | |
+ and tabs[index]["type"] == "page" | |
+ and "webSocketDebuggerUrl" in tabs[index] | |
+ and "id" in tabs[index] | |
+ ): | |
ret = True | |
- logging.debug('Dev tools interface is available') | |
+ logging.debug("Dev tools interface is available") | |
except Exception as err: | |
logging.debug("Connect to dev tools Error: %s", err.__str__()) | |
time.sleep(0.5) | |
return ret | |
def connect(self, timeout): | |
"""Connect to the browser""" | |
import requests | |
+ | |
session = requests.session() | |
proxies = {"http": None, "https": None} | |
ret = False | |
end_time = monotonic.monotonic() + timeout | |
while not ret and monotonic.monotonic() < end_time: | |
@@ -136,42 +145,52 @@ | |
tabs = response.json() | |
logging.debug("Dev Tools tabs: %s", json.dumps(tabs)) | |
if len(tabs): | |
websocket_url = None | |
for index in xrange(len(tabs)): | |
- if 'type' in tabs[index] and \ | |
- tabs[index]['type'] == 'page' and \ | |
- 'webSocketDebuggerUrl' in tabs[index] and \ | |
- 'id' in tabs[index]: | |
+ if ( | |
+ "type" in tabs[index] | |
+ and tabs[index]["type"] == "page" | |
+ and "webSocketDebuggerUrl" in tabs[index] | |
+ and "id" in tabs[index] | |
+ ): | |
if websocket_url is None: | |
- websocket_url = tabs[index]['webSocketDebuggerUrl'] | |
- self.tab_id = tabs[index]['id'] | |
+ websocket_url = tabs[index]["webSocketDebuggerUrl"] | |
+ self.tab_id = tabs[index]["id"] | |
else: | |
# Close extra tabs | |
try: | |
- session.get(self.url + '/close/' + tabs[index]['id'], | |
- proxies=proxies) | |
+ session.get( | |
+ self.url + "/close/" + tabs[index]["id"], | |
+ proxies=proxies, | |
+ ) | |
except Exception: | |
pass | |
if websocket_url is not None: | |
try: | |
self.websocket = DevToolsClient(websocket_url) | |
self.websocket.connect() | |
ret = True | |
except Exception as err: | |
- logging.debug("Connect to dev tools websocket Error: %s", | |
- err.__str__()) | |
+ logging.debug( | |
+ "Connect to dev tools websocket Error: %s", | |
+ err.__str__(), | |
+ ) | |
if not ret: | |
# try connecting to 127.0.0.1 instead of localhost | |
try: | |
- websocket_url = websocket_url.replace('localhost', '127.0.0.1') | |
+ websocket_url = websocket_url.replace( | |
+ "localhost", "127.0.0.1" | |
+ ) | |
self.websocket = DevToolsClient(websocket_url) | |
self.websocket.connect() | |
ret = True | |
except Exception as err: | |
- logging.debug("Connect to dev tools websocket Error: %s", | |
- err.__str__()) | |
+ logging.debug( | |
+ "Connect to dev tools websocket Error: %s", | |
+ err.__str__(), | |
+ ) | |
else: | |
time.sleep(0.5) | |
else: | |
time.sleep(0.5) | |
except Exception as err: | |
@@ -179,19 +198,27 @@ | |
time.sleep(0.5) | |
return ret | |
def prepare_browser(self): | |
"""Run any one-time startup preparation before testing starts""" | |
- self.send_command('Target.setAutoAttach', | |
- {'autoAttach': True, 'waitForDebuggerOnStart': True}) | |
- response = self.send_command('Target.getTargets', {}, wait=True) | |
- if response is not None and 'result' in response and 'targetInfos' in response['result']: | |
- for target in response['result']['targetInfos']: | |
+ self.send_command( | |
+ "Target.setAutoAttach", {"autoAttach": True, "waitForDebuggerOnStart": True} | |
+ ) | |
+ response = self.send_command("Target.getTargets", {}, wait=True) | |
+ if ( | |
+ response is not None | |
+ and "result" in response | |
+ and "targetInfos" in response["result"] | |
+ ): | |
+ for target in response["result"]["targetInfos"]: | |
logging.debug(target) | |
- if 'type' in target and target['type'] == 'service_worker': | |
- self.send_command('Target.attachToTarget', {'targetId': target['targetId']}, | |
- wait=True) | |
+ if "type" in target and target["type"] == "service_worker": | |
+ self.send_command( | |
+ "Target.attachToTarget", | |
+ {"targetId": target["targetId"]}, | |
+ wait=True, | |
+ ) | |
def close(self, close_tab=True): | |
"""Close the dev tools connection""" | |
if self.websocket: | |
try: | |
@@ -199,254 +226,300 @@ | |
except Exception: | |
pass | |
self.websocket = None | |
if close_tab and self.tab_id is not None: | |
import requests | |
+ | |
proxies = {"http": None, "https": None} | |
try: | |
- requests.get(self.url + '/close/' + self.tab_id, proxies=proxies) | |
+ requests.get(self.url + "/close/" + self.tab_id, proxies=proxies) | |
except Exception: | |
pass | |
self.tab_id = None | |
def start_recording(self): | |
"""Start capturing dev tools, timeline and trace data""" | |
self.prepare() | |
- if (self.bodies_zip_file is None and (self.html_body or self.all_bodies)): | |
- self.bodies_zip_file = zipfile.ZipFile(self.path_base + '_bodies.zip', 'w', | |
- zipfile.ZIP_DEFLATED) | |
+ if self.bodies_zip_file is None and (self.html_body or self.all_bodies): | |
+ self.bodies_zip_file = zipfile.ZipFile( | |
+ self.path_base + "_bodies.zip", "w", zipfile.ZIP_DEFLATED | |
+ ) | |
self.recording = True | |
- if self.use_devtools_video and self.job['video'] and self.task['log_data']: | |
- self.grab_screenshot(self.video_prefix + '000000.jpg', png=False) | |
+ if self.use_devtools_video and self.job["video"] and self.task["log_data"]: | |
+ self.grab_screenshot(self.video_prefix + "000000.jpg", png=False) | |
elif self.mobile_viewport is None and not self.options.android: | |
# grab an initial screen shot to get the crop rectangle | |
try: | |
- tmp_file = os.path.join(self.task['dir'], 'tmp.png') | |
+ tmp_file = os.path.join(self.task["dir"], "tmp.png") | |
self.grab_screenshot(tmp_file) | |
os.remove(tmp_file) | |
except Exception: | |
pass | |
self.flush_pending_messages() | |
- self.send_command('Page.enable', {}) | |
- self.send_command('Inspector.enable', {}) | |
- self.send_command('ServiceWorker.enable', {}) | |
- self.send_command('Network.enable', {}) | |
+ self.send_command("Page.enable", {}) | |
+ self.send_command("Inspector.enable", {}) | |
+ self.send_command("ServiceWorker.enable", {}) | |
+ self.send_command("Network.enable", {}) | |
if self.headers: | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, wait=True) | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", {"headers": self.headers}, wait=True | |
+ ) | |
if len(self.workers): | |
for target in self.workers: | |
- self.send_command('Network.enable', {}, target_id=target['targetId']) | |
+ self.send_command("Network.enable", {}, target_id=target["targetId"]) | |
if self.headers: | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, target_id=target['targetId'], | |
- wait=True) | |
- if 'user_agent_string' in self.job: | |
- self.send_command('Network.setUserAgentOverride', | |
- {'userAgent': self.job['user_agent_string']}, wait=True) | |
- if len(self.task['block']): | |
- for block in self.task['block']: | |
- self.send_command('Network.addBlockedURL', {'url': block}) | |
- self.send_command('Network.setBlockedURLs', {'urls': self.task['block']}) | |
- if 'overrideHosts' in self.task and self.task['overrideHosts']: | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", | |
+ {"headers": self.headers}, | |
+ target_id=target["targetId"], | |
+ wait=True, | |
+ ) | |
+ if "user_agent_string" in self.job: | |
+ self.send_command( | |
+ "Network.setUserAgentOverride", | |
+ {"userAgent": self.job["user_agent_string"]}, | |
+ wait=True, | |
+ ) | |
+ if len(self.task["block"]): | |
+ for block in self.task["block"]: | |
+ self.send_command("Network.addBlockedURL", {"url": block}) | |
+ self.send_command("Network.setBlockedURLs", {"urls": self.task["block"]}) | |
+ if "overrideHosts" in self.task and self.task["overrideHosts"]: | |
patterns = [] | |
- for host in self.task['overrideHosts']: | |
- if host == '*': | |
- patterns.append({'urlPattern': 'http://*'}) | |
- patterns.append({'urlPattern': 'https://*'}) | |
+ for host in self.task["overrideHosts"]: | |
+ if host == "*": | |
+ patterns.append({"urlPattern": "http://*"}) | |
+ patterns.append({"urlPattern": "https://*"}) | |
else: | |
- patterns.append({'urlPattern': 'http://{0}*'.format(host)}) | |
- patterns.append({'urlPattern': 'https://{0}*'.format(host)}) | |
- self.send_command('Network.setRequestInterception', {'patterns': patterns}) | |
- if self.task['log_data']: | |
- self.send_command('Security.enable', {}) | |
- self.send_command('Console.enable', {}) | |
- if 'coverage' in self.job and self.job['coverage']: | |
- self.send_command('DOM.enable', {}) | |
- self.send_command('CSS.enable', {}) | |
- self.send_command('CSS.startRuleUsageTracking', {}) | |
- self.send_command('Profiler.enable', {}) | |
- self.send_command('Profiler.setSamplingInterval', {'interval': 100}) | |
- self.send_command('Profiler.start', {}) | |
- if 'trace' in self.job and self.job['trace']: | |
- if 'traceCategories' in self.job: | |
- trace = self.job['traceCategories'] | |
- if not trace.startswith('-*,'): | |
- trace = '-*,' + trace | |
- if trace.find('netlog') >= 0: | |
- self.job['keep_netlog'] = True | |
+ patterns.append({"urlPattern": "http://{0}*".format(host)}) | |
+ patterns.append({"urlPattern": "https://{0}*".format(host)}) | |
+ self.send_command("Network.setRequestInterception", {"patterns": patterns}) | |
+ if self.task["log_data"]: | |
+ self.send_command("Security.enable", {}) | |
+ self.send_command("Console.enable", {}) | |
+ if "coverage" in self.job and self.job["coverage"]: | |
+ self.send_command("DOM.enable", {}) | |
+ self.send_command("CSS.enable", {}) | |
+ self.send_command("CSS.startRuleUsageTracking", {}) | |
+ self.send_command("Profiler.enable", {}) | |
+ self.send_command("Profiler.setSamplingInterval", {"interval": 100}) | |
+ self.send_command("Profiler.start", {}) | |
+ if "trace" in self.job and self.job["trace"]: | |
+ if "traceCategories" in self.job: | |
+ trace = self.job["traceCategories"] | |
+ if not trace.startswith("-*,"): | |
+ trace = "-*," + trace | |
+ if trace.find("netlog") >= 0: | |
+ self.job["keep_netlog"] = True | |
else: | |
- trace = "-*,toplevel,blink,v8,cc,gpu,blink.net," \ | |
- "disabled-by-default-v8.runtime_stats" | |
- self.job['keep_netlog'] = True | |
+ trace = ( | |
+ "-*,toplevel,blink,v8,cc,gpu,blink.net," | |
+ "disabled-by-default-v8.runtime_stats" | |
+ ) | |
+ self.job["keep_netlog"] = True | |
else: | |
- self.job['keep_netlog'] = False | |
+ self.job["keep_netlog"] = False | |
trace = "-*" | |
- if 'netlog' in self.job and self.job['netlog']: | |
- self.job['keep_netlog'] = True | |
- if 'timeline' in self.job and self.job['timeline']: | |
- trace += ',' + ','.join([ | |
- 'blink.console', | |
- 'devtools.timeline' | |
- ]) | |
- if 'timeline_fps' in self.job and self.job['timeline_fps']: | |
- trace += ',' + ','.join([ | |
- 'disabled-by-default-devtools.timeline', | |
- 'disabled-by-default-devtools.timeline.frame' | |
- ]) | |
- if self.use_devtools_video and self.job['video']: | |
+ if "netlog" in self.job and self.job["netlog"]: | |
+ self.job["keep_netlog"] = True | |
+ if "timeline" in self.job and self.job["timeline"]: | |
+ trace += "," + ",".join(["blink.console", "devtools.timeline"]) | |
+ if "timeline_fps" in self.job and self.job["timeline_fps"]: | |
+ trace += "," + ",".join( | |
+ [ | |
+ "disabled-by-default-devtools.timeline", | |
+ "disabled-by-default-devtools.timeline.frame", | |
+ ] | |
+ ) | |
+ if self.use_devtools_video and self.job["video"]: | |
trace += ",disabled-by-default-devtools.screenshot" | |
self.recording_video = True | |
# Add the required trace events | |
- if trace.find(',rail') == -1: | |
- trace += ',rail' | |
- if trace.find(',blink.user_timing') == -1: | |
- trace += ',blink.user_timing' | |
- if trace.find(',netlog') == -1: | |
- trace += ',netlog' | |
- if trace.find(',disabled-by-default-blink.feature_usage') == -1: | |
- trace += ',disabled-by-default-blink.feature_usage' | |
+ if trace.find(",rail") == -1: | |
+ trace += ",rail" | |
+ if trace.find(",blink.user_timing") == -1: | |
+ trace += ",blink.user_timing" | |
+ if trace.find(",netlog") == -1: | |
+ trace += ",netlog" | |
+ if trace.find(",disabled-by-default-blink.feature_usage") == -1: | |
+ trace += ",disabled-by-default-blink.feature_usage" | |
self.trace_enabled = True | |
- self.send_command('Tracing.start', | |
- {'categories': trace, | |
- 'options': 'record-as-much-as-possible'}, | |
- wait=True) | |
+ self.send_command( | |
+ "Tracing.start", | |
+ {"categories": trace, "options": "record-as-much-as-possible"}, | |
+ wait=True, | |
+ ) | |
now = monotonic.monotonic() | |
- if not self.task['stop_at_onload']: | |
+ if not self.task["stop_at_onload"]: | |
self.last_activity = now | |
if self.page_loaded is not None: | |
self.page_loaded = now | |
def stop_capture(self): | |
"""Do any quick work to stop things that are capturing data""" | |
- self.send_command('Inspector.disable', {}) | |
- self.send_command('Page.disable', {}) | |
+ self.send_command("Inspector.disable", {}) | |
+ self.send_command("Page.disable", {}) | |
self.start_collecting_trace() | |
def stop_recording(self): | |
"""Stop capturing dev tools, timeline and trace data""" | |
- if self.task['log_data']: | |
- if 'coverage' in self.job and self.job['coverage']: | |
+ if self.task["log_data"]: | |
+ if "coverage" in self.job and self.job["coverage"]: | |
try: | |
coverage = {} | |
# process the JS coverage | |
- self.send_command('Profiler.stop', {}) | |
- response = self.send_command('Profiler.getBestEffortCoverage', | |
- {}, wait=True, timeout=30) | |
- if 'result' in response and 'result' in response['result']: | |
- for script in response['result']['result']: | |
- if 'url' in script and script['url'] and 'functions' in script: | |
- if script['url'] not in coverage: | |
- coverage[script['url']] = {} | |
- if 'JS' not in coverage[script['url']]: | |
- coverage[script['url']]['JS'] = [] | |
- for function in script['functions']: | |
- if 'ranges' in function: | |
- for chunk in function['ranges']: | |
- coverage[script['url']]['JS'].append({ | |
- 'startOffset': chunk['startOffset'], | |
- 'endOffset': chunk['endOffset'], | |
- 'count': chunk['count'], | |
- 'used': True if chunk['count'] else False | |
- }) | |
- self.send_command('Profiler.disable', {}) | |
+ self.send_command("Profiler.stop", {}) | |
+ response = self.send_command( | |
+ "Profiler.getBestEffortCoverage", {}, wait=True, timeout=30 | |
+ ) | |
+ if "result" in response and "result" in response["result"]: | |
+ for script in response["result"]["result"]: | |
+ if ( | |
+ "url" in script | |
+ and script["url"] | |
+ and "functions" in script | |
+ ): | |
+ if script["url"] not in coverage: | |
+ coverage[script["url"]] = {} | |
+ if "JS" not in coverage[script["url"]]: | |
+ coverage[script["url"]]["JS"] = [] | |
+ for function in script["functions"]: | |
+ if "ranges" in function: | |
+ for chunk in function["ranges"]: | |
+ coverage[script["url"]]["JS"].append( | |
+ { | |
+ "startOffset": chunk["startOffset"], | |
+ "endOffset": chunk["endOffset"], | |
+ "count": chunk["count"], | |
+ "used": True | |
+ if chunk["count"] | |
+ else False, | |
+ } | |
+ ) | |
+ self.send_command("Profiler.disable", {}) | |
# Process the css coverage | |
- response = self.send_command('CSS.stopRuleUsageTracking', | |
- {}, wait=True, timeout=30) | |
- if 'result' in response and 'ruleUsage' in response['result']: | |
- rule_usage = response['result']['ruleUsage'] | |
+ response = self.send_command( | |
+ "CSS.stopRuleUsageTracking", {}, wait=True, timeout=30 | |
+ ) | |
+ if "result" in response and "ruleUsage" in response["result"]: | |
+ rule_usage = response["result"]["ruleUsage"] | |
for rule in rule_usage: | |
- if 'styleSheetId' in rule and rule['styleSheetId'] in self.stylesheets: | |
- sheet_id = rule['styleSheetId'] | |
+ if ( | |
+ "styleSheetId" in rule | |
+ and rule["styleSheetId"] in self.stylesheets | |
+ ): | |
+ sheet_id = rule["styleSheetId"] | |
url = self.stylesheets[sheet_id] | |
if url not in coverage: | |
coverage[url] = {} | |
- if 'CSS' not in coverage[url]: | |
- coverage[url]['CSS'] = [] | |
- coverage[url]['CSS'].append({ | |
- 'startOffset': rule['startOffset'], | |
- 'endOffset': rule['endOffset'], | |
- 'used': rule['used'] | |
- }) | |
+ if "CSS" not in coverage[url]: | |
+ coverage[url]["CSS"] = [] | |
+ coverage[url]["CSS"].append( | |
+ { | |
+ "startOffset": rule["startOffset"], | |
+ "endOffset": rule["endOffset"], | |
+ "used": rule["used"], | |
+ } | |
+ ) | |
if coverage: | |
summary = {} | |
- categories = ['JS', 'CSS'] | |
+ categories = ["JS", "CSS"] | |
for url in coverage: | |
for category in categories: | |
if category in coverage[url]: | |
total_bytes = 0 | |
used_bytes = 0 | |
for chunk in coverage[url][category]: | |
- range_bytes = chunk['endOffset'] - chunk['startOffset'] | |
+ range_bytes = ( | |
+ chunk["endOffset"] - chunk["startOffset"] | |
+ ) | |
if range_bytes > 0: | |
total_bytes += range_bytes | |
- if chunk['used']: | |
+ if chunk["used"]: | |
used_bytes += range_bytes | |
used_pct = 100.0 | |
if total_bytes > 0: | |
- used_pct = float((used_bytes * 10000) / total_bytes) / 100.0 | |
+ used_pct = ( | |
+ float((used_bytes * 10000) / total_bytes) | |
+ / 100.0 | |
+ ) | |
if url not in summary: | |
summary[url] = {} | |
- summary[url]['{0}_bytes'.format(category)] = total_bytes | |
- summary[url]['{0}_bytes_used'.format(category)] = used_bytes | |
- summary[url]['{0}_percent_used'.format(category)] = used_pct | |
- path = self.path_base + '_coverage.json.gz' | |
- with gzip.open(path, 'wb', 7) as f_out: | |
+ summary[url][ | |
+ "{0}_bytes".format(category) | |
+ ] = total_bytes | |
+ summary[url][ | |
+ "{0}_bytes_used".format(category) | |
+ ] = used_bytes | |
+ summary[url][ | |
+ "{0}_percent_used".format(category) | |
+ ] = used_pct | |
+ path = self.path_base + "_coverage.json.gz" | |
+ with gzip.open(path, "wb", 7) as f_out: | |
json.dump(summary, f_out) | |
- self.send_command('CSS.disable', {}) | |
- self.send_command('DOM.disable', {}) | |
+ self.send_command("CSS.disable", {}) | |
+ self.send_command("DOM.disable", {}) | |
except Exception as err: | |
logging.exception(err) | |
self.recording = False | |
self.flush_pending_messages() | |
- if self.task['log_data']: | |
- self.send_command('Security.disable', {}) | |
- self.send_command('Console.disable', {}) | |
+ if self.task["log_data"]: | |
+ self.send_command("Security.disable", {}) | |
+ self.send_command("Console.disable", {}) | |
self.get_response_bodies() | |
if self.bodies_zip_file is not None: | |
self.bodies_zip_file.close() | |
self.bodies_zip_file = None | |
- self.send_command('Network.disable', {}) | |
+ self.send_command("Network.disable", {}) | |
if len(self.workers): | |
for target in self.workers: | |
- self.send_command('Network.disable', {}, target_id=target['targetId']) | |
- self.send_command('ServiceWorker.disable', {}) | |
+ self.send_command("Network.disable", {}, target_id=target["targetId"]) | |
+ self.send_command("ServiceWorker.disable", {}) | |
if self.dev_tools_file is not None: | |
self.dev_tools_file.write("\n]") | |
self.dev_tools_file.close() | |
self.dev_tools_file = None | |
def start_collecting_trace(self): | |
"""Kick off the trace processing asynchronously""" | |
if self.trace_enabled: | |
keep_timeline = True | |
- if 'discard_timeline' in self.job and self.job['discard_timeline']: | |
+ if "discard_timeline" in self.job and self.job["discard_timeline"]: | |
keep_timeline = False | |
video_prefix = self.video_prefix if self.recording_video else None | |
- self.websocket.start_processing_trace(self.path_base, video_prefix, | |
- self.options, self.job, self.task, | |
- self.start_timestamp, keep_timeline) | |
- self.send_command('Tracing.end', {}) | |
+ self.websocket.start_processing_trace( | |
+ self.path_base, | |
+ video_prefix, | |
+ self.options, | |
+ self.job, | |
+ self.task, | |
+ self.start_timestamp, | |
+ keep_timeline, | |
+ ) | |
+ self.send_command("Tracing.end", {}) | |
def collect_trace(self): | |
"""Stop tracing and collect the results""" | |
if self.trace_enabled: | |
self.trace_enabled = False | |
start = monotonic.monotonic() | |
# Keep pumping messages until we get tracingComplete or | |
# we get a gap of 30 seconds between messages | |
if self.websocket: | |
- logging.info('Collecting trace events') | |
+ logging.info("Collecting trace events") | |
done = False | |
no_message_count = 0 | |
while not done and no_message_count < 30: | |
try: | |
raw = self.websocket.get_message(1) | |
if raw is not None and len(raw): | |
no_message_count = 0 | |
msg = json.loads(raw) | |
- if 'method' in msg and msg['method'] == 'Tracing.tracingComplete': | |
+ if ( | |
+ "method" in msg | |
+ and msg["method"] == "Tracing.tracingComplete" | |
+ ): | |
done = True | |
else: | |
no_message_count += 1 | |
except Exception: | |
pass | |
@@ -457,148 +530,186 @@ | |
def get_response_body(self, request_id): | |
"""Retrieve and store the given response body (if necessary)""" | |
if request_id not in self.response_bodies and self.body_fail_count < 3: | |
request = self.get_request(request_id) | |
- if request is not None and 'status' in request and request['status'] == 200 and \ | |
- 'response_headers' in request: | |
- content_length = self.get_header_value(request['response_headers'], | |
- 'Content-Length') | |
+ if ( | |
+ request is not None | |
+ and "status" in request | |
+ and request["status"] == 200 | |
+ and "response_headers" in request | |
+ ): | |
+ content_length = self.get_header_value( | |
+ request["response_headers"], "Content-Length" | |
+ ) | |
if content_length is not None: | |
- content_length = int(re.search(r'\d+', str(content_length)).group()) | |
- elif 'transfer_size' in request: | |
- content_length = request['transfer_size'] | |
+ content_length = int(re.search(r"\d+", str(content_length)).group()) | |
+ elif "transfer_size" in request: | |
+ content_length = request["transfer_size"] | |
else: | |
content_length = 0 | |
- logging.debug('Getting body for %s (%d) - %s', request_id, | |
- content_length, request['url']) | |
- path = os.path.join(self.task['dir'], 'bodies') | |
+ logging.debug( | |
+ "Getting body for %s (%d) - %s", | |
+ request_id, | |
+ content_length, | |
+ request["url"], | |
+ ) | |
+ path = os.path.join(self.task["dir"], "bodies") | |
if not os.path.isdir(path): | |
os.makedirs(path) | |
body_file_path = os.path.join(path, request_id) | |
if not os.path.exists(body_file_path): | |
# Only grab bodies needed for optimization checks | |
# or if we are saving full bodies | |
need_body = True | |
- content_type = self.get_header_value(request['response_headers'], | |
- 'Content-Type') | |
+ content_type = self.get_header_value( | |
+ request["response_headers"], "Content-Type" | |
+ ) | |
is_text = False | |
if content_type is not None: | |
content_type = content_type.lower() | |
- if content_type.startswith('text/') or \ | |
- content_type.find('javascript') >= 0 or \ | |
- content_type.find('json') >= 0 or \ | |
- content_type.find('/svg+xml'): | |
+ if ( | |
+ content_type.startswith("text/") | |
+ or content_type.find("javascript") >= 0 | |
+ or content_type.find("json") >= 0 | |
+ or content_type.find("/svg+xml") | |
+ ): | |
is_text = True | |
# Ignore video files over 10MB | |
- if content_type[:6] == 'video/' and content_length > 10000000: | |
+ if content_type[:6] == "video/" and content_length > 10000000: | |
need_body = False | |
- optimization_checks_disabled = bool('noopt' in self.job and self.job['noopt']) | |
+ optimization_checks_disabled = bool( | |
+ "noopt" in self.job and self.job["noopt"] | |
+ ) | |
if optimization_checks_disabled and self.bodies_zip_file is None: | |
need_body = False | |
if need_body: | |
target_id = None | |
- if request_id in self.requests and 'targetId' in self.requests[request_id]: | |
- target_id = self.requests[request_id]['targetId'] | |
- response = self.send_command("Network.getResponseBody", | |
- {'requestId': request_id}, wait=True, | |
- target_id=target_id) | |
+ if ( | |
+ request_id in self.requests | |
+ and "targetId" in self.requests[request_id] | |
+ ): | |
+ target_id = self.requests[request_id]["targetId"] | |
+ response = self.send_command( | |
+ "Network.getResponseBody", | |
+ {"requestId": request_id}, | |
+ wait=True, | |
+ target_id=target_id, | |
+ ) | |
if response is None: | |
self.body_fail_count += 1 | |
- logging.warning('No response to body request for request %s', | |
- request_id) | |
- elif 'result' not in response or \ | |
- 'body' not in response['result']: | |
+ logging.warning( | |
+ "No response to body request for request %s", request_id | |
+ ) | |
+ elif ( | |
+ "result" not in response or "body" not in response["result"] | |
+ ): | |
self.body_fail_count = 0 | |
- logging.warning('Missing response body for request %s', | |
- request_id) | |
- elif len(response['result']['body']): | |
+ logging.warning( | |
+ "Missing response body for request %s", request_id | |
+ ) | |
+ elif len(response["result"]["body"]): | |
self.body_fail_count = 0 | |
# Write the raw body to a file (all bodies) | |
- if 'base64Encoded' in response['result'] and \ | |
- response['result']['base64Encoded']: | |
- body = base64.b64decode(response['result']['body']) | |
+ if ( | |
+ "base64Encoded" in response["result"] | |
+ and response["result"]["base64Encoded"] | |
+ ): | |
+ body = base64.b64decode(response["result"]["body"]) | |
# Run a sanity check to make sure it isn't binary | |
if self.bodies_zip_file is not None and is_text: | |
try: | |
json.loads('"' + body.replace('"', '\\"') + '"') | |
except Exception: | |
is_text = False | |
else: | |
- body = response['result']['body'].encode('utf-8') | |
+ body = response["result"]["body"].encode("utf-8") | |
is_text = True | |
# Add text bodies to the zip archive | |
store_body = self.all_bodies | |
if self.html_body and request_id == self.main_request: | |
store_body = True | |
- if store_body and self.bodies_zip_file is not None and is_text: | |
+ if ( | |
+ store_body | |
+ and self.bodies_zip_file is not None | |
+ and is_text | |
+ ): | |
self.body_index += 1 | |
- name = '{0:03d}-{1}-body.txt'.format(self.body_index, request_id) | |
+ name = "{0:03d}-{1}-body.txt".format( | |
+ self.body_index, request_id | |
+ ) | |
self.bodies_zip_file.writestr(name, body) | |
- logging.debug('%s: Stored body in zip', request_id) | |
- logging.debug('%s: Body length: %d', request_id, len(body)) | |
+ logging.debug("%s: Stored body in zip", request_id) | |
+ logging.debug("%s: Body length: %d", request_id, len(body)) | |
self.response_bodies[request_id] = body | |
- with open(body_file_path, 'wb') as body_file: | |
+ with open(body_file_path, "wb") as body_file: | |
body_file.write(body) | |
else: | |
self.body_fail_count = 0 | |
- self.response_bodies[request_id] = response['result']['body'] | |
+ self.response_bodies[request_id] = response["result"][ | |
+ "body" | |
+ ] | |
def get_response_bodies(self): | |
"""Retrieve all of the response bodies for the requests that we know about""" | |
requests = self.get_requests() | |
- if self.task['error'] is None and requests: | |
+ if self.task["error"] is None and requests: | |
for request_id in requests: | |
self.get_response_body(request_id) | |
def get_request(self, request_id): | |
"""Get the given request details if it is a real request""" | |
request = None | |
- if request_id in self.requests and 'fromNet' in self.requests[request_id] \ | |
- and self.requests[request_id]['fromNet']: | |
+ if ( | |
+ request_id in self.requests | |
+ and "fromNet" in self.requests[request_id] | |
+ and self.requests[request_id]["fromNet"] | |
+ ): | |
events = self.requests[request_id] | |
- request = {'id': request_id} | |
+ request = {"id": request_id} | |
# See if we have a body | |
- body_path = os.path.join(self.task['dir'], 'bodies') | |
+ body_path = os.path.join(self.task["dir"], "bodies") | |
body_file_path = os.path.join(body_path, request_id) | |
if os.path.isfile(body_file_path): | |
- request['body'] = body_file_path | |
+ request["body"] = body_file_path | |
if request_id in self.response_bodies: | |
- request['response_body'] = self.response_bodies[request_id] | |
+ request["response_body"] = self.response_bodies[request_id] | |
# Get the headers from responseReceived | |
- if 'response' in events: | |
- response = events['response'][-1] | |
- if 'response' in response: | |
- if 'url' in response['response']: | |
- request['url'] = response['response']['url'] | |
- if 'status' in response['response']: | |
- request['status'] = response['response']['status'] | |
- if 'headers' in response['response']: | |
- request['response_headers'] = response['response']['headers'] | |
- if 'requestHeaders' in response['response']: | |
- request['request_headers'] = response['response']['requestHeaders'] | |
- if 'connectionId' in response['response']: | |
- request['connection'] = response['response']['connectionId'] | |
+ if "response" in events: | |
+ response = events["response"][-1] | |
+ if "response" in response: | |
+ if "url" in response["response"]: | |
+ request["url"] = response["response"]["url"] | |
+ if "status" in response["response"]: | |
+ request["status"] = response["response"]["status"] | |
+ if "headers" in response["response"]: | |
+ request["response_headers"] = response["response"]["headers"] | |
+ if "requestHeaders" in response["response"]: | |
+ request["request_headers"] = response["response"][ | |
+ "requestHeaders" | |
+ ] | |
+ if "connectionId" in response["response"]: | |
+ request["connection"] = response["response"]["connectionId"] | |
# Fill in any missing details from the requestWillBeSent event | |
- if 'request' in events: | |
- req = events['request'][-1] | |
- if 'request' in req: | |
- if 'url' not in request and 'url' in req['request']: | |
- request['url'] = req['request']['url'] | |
- if 'request_headers' not in request and 'headers' in req['request']: | |
- request['request_headers'] = req['request']['headers'] | |
+ if "request" in events: | |
+ req = events["request"][-1] | |
+ if "request" in req: | |
+ if "url" not in request and "url" in req["request"]: | |
+ request["url"] = req["request"]["url"] | |
+ if "request_headers" not in request and "headers" in req["request"]: | |
+ request["request_headers"] = req["request"]["headers"] | |
# Get the response length from the data events | |
- if 'finished' in events and 'encodedDataLength' in events['finished']: | |
- request['transfer_size'] = events['finished']['encodedDataLength'] | |
- elif 'data' in events: | |
+ if "finished" in events and "encodedDataLength" in events["finished"]: | |
+ request["transfer_size"] = events["finished"]["encodedDataLength"] | |
+ elif "data" in events: | |
transfer_size = 0 | |
- for data in events['data']: | |
- if 'encodedDataLength' in data: | |
- transfer_size += data['encodedDataLength'] | |
- elif 'dataLength' in data: | |
- transfer_size += data['dataLength'] | |
- request['transfer_size'] = transfer_size | |
+ for data in events["data"]: | |
+ if "encodedDataLength" in data: | |
+ transfer_size += data["encodedDataLength"] | |
+ elif "dataLength" in data: | |
+ transfer_size += data["dataLength"] | |
+ request["transfer_size"] = transfer_size | |
return request | |
def get_requests(self): | |
"""Get a dictionary of all of the requests and the details (headers, body file)""" | |
requests = None | |
@@ -631,17 +742,20 @@ | |
"""Send a raw dev tools message and optionally wait for the response""" | |
ret = None | |
if target_id is not None: | |
self.command_id += 1 | |
command_id = int(self.command_id) | |
- msg = {'id': command_id, 'method': method, 'params': params} | |
+ msg = {"id": command_id, "method": method, "params": params} | |
if wait: | |
self.pending_commands.append(command_id) | |
end_time = monotonic.monotonic() + timeout | |
- self.send_command('Target.sendMessageToTarget', | |
- {'targetId': target_id, 'message': json.dumps(msg)}, | |
- wait=True, timeout=timeout) | |
+ self.send_command( | |
+ "Target.sendMessageToTarget", | |
+ {"targetId": target_id, "message": json.dumps(msg)}, | |
+ wait=True, | |
+ timeout=timeout, | |
+ ) | |
if wait: | |
if command_id in self.command_responses: | |
ret = self.command_responses[command_id] | |
del self.command_responses[command_id] | |
else: | |
@@ -660,11 +774,11 @@ | |
elif self.websocket: | |
self.command_id += 1 | |
command_id = int(self.command_id) | |
if wait: | |
self.pending_commands.append(command_id) | |
- msg = {'id': command_id, 'method': method, 'params': params} | |
+ msg = {"id": command_id, "method": method, "params": params} | |
try: | |
out = json.dumps(msg) | |
logging.debug("Sending: %s", out[:1000]) | |
self.websocket.send(out) | |
if wait: | |
@@ -687,11 +801,11 @@ | |
def wait_for_page_load(self): | |
"""Wait for the page load and activity to finish""" | |
if self.websocket: | |
start_time = monotonic.monotonic() | |
- end_time = start_time + self.task['time_limit'] | |
+ end_time = start_time + self.task["time_limit"] | |
done = False | |
interval = 1 | |
while not done: | |
if self.page_loaded is not None: | |
interval = 0.1 | |
@@ -707,51 +821,71 @@ | |
now = monotonic.monotonic() | |
elapsed_test = now - start_time | |
if self.nav_error is not None: | |
done = True | |
if self.page_loaded is None: | |
- self.task['error'] = self.nav_error | |
+ self.task["error"] = self.nav_error | |
if self.nav_error_code is not None: | |
- self.task['page_data']['result'] = self.nav_error_code | |
+ self.task["page_data"]["result"] = self.nav_error_code | |
else: | |
- self.task['page_data']['result'] = 12999 | |
+ self.task["page_data"]["result"] = 12999 | |
elif now >= end_time: | |
done = True | |
# only consider it an error if we didn't get a page load event | |
if self.page_loaded is None: | |
- self.task['error'] = "Page Load Timeout" | |
- self.task['page_data']['result'] = 99997 | |
- elif 'time' not in self.job or elapsed_test > self.job['time']: | |
+ self.task["error"] = "Page Load Timeout" | |
+ self.task["page_data"]["result"] = 99997 | |
+ elif "time" not in self.job or elapsed_test > self.job["time"]: | |
elapsed_activity = now - self.last_activity | |
- elapsed_page_load = now - self.page_loaded if self.page_loaded else 0 | |
- if elapsed_page_load >= 1 and elapsed_activity >= self.task['activity_time']: | |
+ elapsed_page_load = ( | |
+ now - self.page_loaded if self.page_loaded else 0 | |
+ ) | |
+ if ( | |
+ elapsed_page_load >= 1 | |
+ and elapsed_activity >= self.task["activity_time"] | |
+ ): | |
done = True | |
- elif self.task['error'] is not None: | |
+ elif self.task["error"] is not None: | |
done = True | |
def grab_screenshot(self, path, png=True, resize=0): | |
"""Save the screen shot (png or jpeg)""" | |
if not self.main_thread_blocked: | |
- response = self.send_command("Page.captureScreenshot", {}, wait=True, timeout=10) | |
- if response is not None and 'result' in response and 'data' in response['result']: | |
- resize_string = '' if not resize else '-resize {0:d}x{0:d} '.format(resize) | |
+ response = self.send_command( | |
+ "Page.captureScreenshot", {}, wait=True, timeout=10 | |
+ ) | |
+ if ( | |
+ response is not None | |
+ and "result" in response | |
+ and "data" in response["result"] | |
+ ): | |
+ resize_string = ( | |
+ "" if not resize else "-resize {0:d}x{0:d} ".format(resize) | |
+ ) | |
if png: | |
- with open(path, 'wb') as image_file: | |
- image_file.write(base64.b64decode(response['result']['data'])) | |
+ with open(path, "wb") as image_file: | |
+ image_file.write(base64.b64decode(response["result"]["data"])) | |
# Fix png issues | |
- cmd = '{0} -format png -define png:color-type=2 '\ | |
- '-depth 8 {1}"{2}"'.format(self.job['image_magick']['mogrify'], | |
- resize_string, path) | |
+ cmd = ( | |
+ "{0} -format png -define png:color-type=2 " | |
+ '-depth 8 {1}"{2}"'.format( | |
+ self.job["image_magick"]["mogrify"], resize_string, path | |
+ ) | |
+ ) | |
logging.debug(cmd) | |
subprocess.call(cmd, shell=True) | |
else: | |
- tmp_file = path + '.png' | |
- with open(tmp_file, 'wb') as image_file: | |
- image_file.write(base64.b64decode(response['result']['data'])) | |
+ tmp_file = path + ".png" | |
+ with open(tmp_file, "wb") as image_file: | |
+ image_file.write(base64.b64decode(response["result"]["data"])) | |
command = '{0} "{1}" {2}-quality {3:d} "{4}"'.format( | |
- self.job['image_magick']['convert'], | |
- tmp_file, resize_string, self.job['imageQuality'], path) | |
+ self.job["image_magick"]["convert"], | |
+ tmp_file, | |
+ resize_string, | |
+ self.job["imageQuality"], | |
+ path, | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
if os.path.isfile(tmp_file): | |
try: | |
os.remove(tmp_file) | |
@@ -772,275 +906,336 @@ | |
return similar | |
def execute_js(self, script): | |
"""Run the provided JS in the browser and return the result""" | |
ret = None | |
- if self.task['error'] is None and not self.main_thread_blocked: | |
- response = self.send_command("Runtime.evaluate", | |
- {'expression': script, | |
- 'returnByValue': True, | |
- 'timeout': 30000}, | |
- wait=True, timeout=30) | |
- if response is not None and 'result' in response and\ | |
- 'result' in response['result'] and\ | |
- 'value' in response['result']['result']: | |
- ret = response['result']['result']['value'] | |
+ if self.task["error"] is None and not self.main_thread_blocked: | |
+ response = self.send_command( | |
+ "Runtime.evaluate", | |
+ {"expression": script, "returnByValue": True, "timeout": 30000}, | |
+ wait=True, | |
+ timeout=30, | |
+ ) | |
+ if ( | |
+ response is not None | |
+ and "result" in response | |
+ and "result" in response["result"] | |
+ and "value" in response["result"]["result"] | |
+ ): | |
+ ret = response["result"]["result"]["value"] | |
return ret | |
def set_header(self, header): | |
"""Add/modify a header on the outbound requests""" | |
if header is not None and len(header): | |
- separator = header.find(':') | |
+ separator = header.find(":") | |
if separator > 0: | |
name = header[:separator].strip() | |
- value = header[separator + 1:].strip() | |
+ value = header[separator + 1 :].strip() | |
self.headers[name] = value | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, wait=True) | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", {"headers": self.headers}, wait=True | |
+ ) | |
def reset_headers(self): | |
"""Add/modify a header on the outbound requests""" | |
self.headers = {} | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, wait=True) | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", {"headers": self.headers}, wait=True | |
+ ) | |
def clear_cache(self): | |
"""Clear the browser cache""" | |
- self.send_command('Network.clearBrowserCache', {}, wait=True) | |
+ self.send_command("Network.clearBrowserCache", {}, wait=True) | |
def process_message(self, msg, target_id=None): | |
"""Process an inbound dev tools message""" | |
- if 'method' in msg and self.recording: | |
- parts = msg['method'].split('.') | |
+ if "method" in msg and self.recording: | |
+ parts = msg["method"].split(".") | |
if len(parts) >= 2: | |
category = parts[0] | |
event = parts[1] | |
- if category == 'Page': | |
+ if category == "Page": | |
self.log_dev_tools_event(msg) | |
self.process_page_event(event, msg) | |
- elif category == 'Network': | |
+ elif category == "Network": | |
self.log_dev_tools_event(msg) | |
self.process_network_event(event, msg, target_id) | |
- elif category == 'Inspector' and target_id is None: | |
+ elif category == "Inspector" and target_id is None: | |
self.process_inspector_event(event) | |
- elif category == 'CSS': | |
+ elif category == "CSS": | |
self.process_css_event(event, msg) | |
- elif category == 'Target': | |
+ elif category == "Target": | |
self.process_target_event(event, msg) | |
else: | |
self.log_dev_tools_event(msg) | |
- if 'id' in msg: | |
- response_id = int(re.search(r'\d+', str(msg['id'])).group()) | |
+ if "id" in msg: | |
+ response_id = int(re.search(r"\d+", str(msg["id"])).group()) | |
if response_id in self.pending_commands: | |
self.pending_commands.remove(response_id) | |
self.command_responses[response_id] = msg | |
def process_page_event(self, event, msg): | |
"""Process Page.* dev tools events""" | |
- if event == 'loadEventFired': | |
+ if event == "loadEventFired": | |
self.page_loaded = monotonic.monotonic() | |
- elif event == 'frameStartedLoading' and 'params' in msg and 'frameId' in msg['params']: | |
+ elif ( | |
+ event == "frameStartedLoading" | |
+ and "params" in msg | |
+ and "frameId" in msg["params"] | |
+ ): | |
if self.is_navigating and self.main_frame is None: | |
self.is_navigating = False | |
- self.main_frame = msg['params']['frameId'] | |
- if self.main_frame == msg['params']['frameId']: | |
+ self.main_frame = msg["params"]["frameId"] | |
+ if self.main_frame == msg["params"]["frameId"]: | |
logging.debug("Navigating main frame") | |
self.last_activity = monotonic.monotonic() | |
self.page_loaded = None | |
- elif event == 'frameNavigated' and 'params' in msg and \ | |
- 'frame' in msg['params'] and 'id' in msg['params']['frame']: | |
- if self.main_frame is not None and \ | |
- self.main_frame == msg['params']['frame']['id'] and\ | |
- 'injectScript' in self.job: | |
- self.execute_js(self.job['injectScript']) | |
- elif event == 'frameStoppedLoading' and 'params' in msg and 'frameId' in msg['params']: | |
- if self.main_frame is not None and \ | |
- not self.page_loaded and \ | |
- self.main_frame == msg['params']['frameId']: | |
+ elif ( | |
+ event == "frameNavigated" | |
+ and "params" in msg | |
+ and "frame" in msg["params"] | |
+ and "id" in msg["params"]["frame"] | |
+ ): | |
+ if ( | |
+ self.main_frame is not None | |
+ and self.main_frame == msg["params"]["frame"]["id"] | |
+ and "injectScript" in self.job | |
+ ): | |
+ self.execute_js(self.job["injectScript"]) | |
+ elif ( | |
+ event == "frameStoppedLoading" | |
+ and "params" in msg | |
+ and "frameId" in msg["params"] | |
+ ): | |
+ if ( | |
+ self.main_frame is not None | |
+ and not self.page_loaded | |
+ and self.main_frame == msg["params"]["frameId"] | |
+ ): | |
if self.nav_error is not None: | |
- self.task['error'] = self.nav_error | |
+ self.task["error"] = self.nav_error | |
logging.debug("Page load failed: %s", self.nav_error) | |
if self.nav_error_code is not None: | |
- self.task['page_data']['result'] = self.nav_error_code | |
+ self.task["page_data"]["result"] = self.nav_error_code | |
else: | |
- self.task['page_data']['result'] = 12999 | |
+ self.task["page_data"]["result"] = 12999 | |
self.page_loaded = monotonic.monotonic() | |
- elif event == 'javascriptDialogOpening': | |
- result = self.send_command("Page.handleJavaScriptDialog", {"accept": False}, wait=True) | |
- if result is not None and 'error' in result: | |
- result = self.send_command("Page.handleJavaScriptDialog", | |
- {"accept": True}, wait=True) | |
- if result is not None and 'error' in result: | |
- self.task['error'] = "Page opened a modal dailog" | |
- elif event == 'interstitialShown': | |
+ elif event == "javascriptDialogOpening": | |
+ result = self.send_command( | |
+ "Page.handleJavaScriptDialog", {"accept": False}, wait=True | |
+ ) | |
+ if result is not None and "error" in result: | |
+ result = self.send_command( | |
+ "Page.handleJavaScriptDialog", {"accept": True}, wait=True | |
+ ) | |
+ if result is not None and "error" in result: | |
+ self.task["error"] = "Page opened a modal dailog" | |
+ elif event == "interstitialShown": | |
self.main_thread_blocked = True | |
logging.debug("Page opened a modal interstitial") | |
self.nav_error = "Page opened a modal interstitial" | |
self.nav_error_code = 405 | |
def process_css_event(self, event, msg): | |
"""Handle CSS.* events""" | |
- if event == 'styleSheetAdded': | |
- if 'params' in msg and 'header' in msg['params']: | |
- entry = msg['params']['header'] | |
- if 'styleSheetId' in entry and \ | |
- entry['styleSheetId'] not in self.stylesheets and \ | |
- 'sourceURL' in entry and entry['sourceURL']: | |
- self.stylesheets[entry['styleSheetId']] = entry['sourceURL'] | |
+ if event == "styleSheetAdded": | |
+ if "params" in msg and "header" in msg["params"]: | |
+ entry = msg["params"]["header"] | |
+ if ( | |
+ "styleSheetId" in entry | |
+ and entry["styleSheetId"] not in self.stylesheets | |
+ and "sourceURL" in entry | |
+ and entry["sourceURL"] | |
+ ): | |
+ self.stylesheets[entry["styleSheetId"]] = entry["sourceURL"] | |
def process_network_event(self, event, msg, target_id=None): | |
"""Process Network.* dev tools events""" | |
- if event == 'requestIntercepted': | |
- params = {'interceptionId': msg['params']['interceptionId']} | |
- if 'overrideHosts' in self.task: | |
- url = msg['params']['request']['url'] | |
- parts = urlsplit(url).netloc.split(':') | |
+ if event == "requestIntercepted": | |
+ params = {"interceptionId": msg["params"]["interceptionId"]} | |
+ if "overrideHosts" in self.task: | |
+ url = msg["params"]["request"]["url"] | |
+ parts = urlsplit(url).netloc.split(":") | |
host = parts[0] | |
# go through the override list and find the fist match (supporting wildcards) | |
try: | |
from fnmatch import fnmatch | |
- for host_match in self.task['overrideHosts']: | |
+ | |
+ for host_match in self.task["overrideHosts"]: | |
if fnmatch(host, host_match): | |
# Overriding to * is just a passthrough, don't actually modify anything | |
- if self.task['overrideHosts'][host_match] != '*': | |
- headers = msg['params']['request']['headers'] | |
- headers['Host'] = self.task['overrideHosts'][host_match] | |
- headers['x-Host'] = host | |
- params['headers'] = headers | |
- params['url'] = url.replace(host, self.task['overrideHosts'][host_match], 1) | |
+ if self.task["overrideHosts"][host_match] != "*": | |
+ headers = msg["params"]["request"]["headers"] | |
+ headers["Host"] = self.task["overrideHosts"][host_match] | |
+ headers["x-Host"] = host | |
+ params["headers"] = headers | |
+ params["url"] = url.replace( | |
+ host, self.task["overrideHosts"][host_match], 1 | |
+ ) | |
break | |
except Exception: | |
pass | |
- self.send_command('Network.continueInterceptedRequest', params) | |
- elif 'requestId' in msg['params']: | |
- request_id = msg['params']['requestId'] | |
+ self.send_command("Network.continueInterceptedRequest", params) | |
+ elif "requestId" in msg["params"]: | |
+ request_id = msg["params"]["requestId"] | |
if request_id not in self.requests: | |
- self.requests[request_id] = {'id': request_id} | |
+ self.requests[request_id] = {"id": request_id} | |
request = self.requests[request_id] | |
if target_id is not None: | |
- request['targetId'] = target_id | |
- ignore_activity = request['is_video'] if 'is_video' in request else False | |
- if event == 'requestWillBeSent': | |
- if self.is_navigating and self.main_frame is None and \ | |
- 'frameId' in msg['params']: | |
+ request["targetId"] = target_id | |
+ ignore_activity = request["is_video"] if "is_video" in request else False | |
+ if event == "requestWillBeSent": | |
+ if ( | |
+ self.is_navigating | |
+ and self.main_frame is None | |
+ and "frameId" in msg["params"] | |
+ ): | |
self.is_navigating = False | |
- self.main_frame = msg['params']['frameId'] | |
- if 'request' not in request: | |
- request['request'] = [] | |
- request['request'].append(msg['params']) | |
- if 'url' in msg['params'] and msg['params']['url'].endswith('.mp4'): | |
- request['is_video'] = True | |
- request['fromNet'] = True | |
- if self.main_frame is not None and \ | |
- self.main_request is None and \ | |
- 'frameId' in msg['params'] and \ | |
- msg['params']['frameId'] == self.main_frame: | |
- logging.debug('Main request detected') | |
+ self.main_frame = msg["params"]["frameId"] | |
+ if "request" not in request: | |
+ request["request"] = [] | |
+ request["request"].append(msg["params"]) | |
+ if "url" in msg["params"] and msg["params"]["url"].endswith(".mp4"): | |
+ request["is_video"] = True | |
+ request["fromNet"] = True | |
+ if ( | |
+ self.main_frame is not None | |
+ and self.main_request is None | |
+ and "frameId" in msg["params"] | |
+ and msg["params"]["frameId"] == self.main_frame | |
+ ): | |
+ logging.debug("Main request detected") | |
self.main_request = request_id | |
- if 'timestamp' in msg['params']: | |
- self.start_timestamp = float(msg['params']['timestamp']) | |
- elif event == 'resourceChangedPriority': | |
- if 'priority' not in request: | |
- request['priority'] = [] | |
- request['priority'].append(msg['params']) | |
- elif event == 'requestServedFromCache': | |
+ if "timestamp" in msg["params"]: | |
+ self.start_timestamp = float(msg["params"]["timestamp"]) | |
+ elif event == "resourceChangedPriority": | |
+ if "priority" not in request: | |
+ request["priority"] = [] | |
+ request["priority"].append(msg["params"]) | |
+ elif event == "requestServedFromCache": | |
self.response_started = True | |
- request['fromNet'] = False | |
- elif event == 'responseReceived': | |
+ request["fromNet"] = False | |
+ elif event == "responseReceived": | |
self.response_started = True | |
- if 'response' not in request: | |
- request['response'] = [] | |
- request['response'].append(msg['params']) | |
- if 'response' in msg['params']: | |
- response = msg['params']['response'] | |
- if 'fromDiskCache' in response and response['fromDiskCache']: | |
- request['fromNet'] = False | |
- if 'fromServiceWorker' in response and response['fromServiceWorker']: | |
- request['fromNet'] = False | |
- if 'mimeType' in response and response['mimeType'].startswith('video/'): | |
- request['is_video'] = True | |
- if self.main_request is not None and \ | |
- request_id == self.main_request and \ | |
- 'headers' in response: | |
- self.main_request_headers = response['headers'] | |
- if self.main_request is not None and \ | |
- request_id == self.main_request and \ | |
- 'status' in response and response['status'] >= 400: | |
- self.nav_error_code = response['status'] | |
- if 'statusText' in response and response['statusText']: | |
- self.nav_error = response['statusText'] | |
+ if "response" not in request: | |
+ request["response"] = [] | |
+ request["response"].append(msg["params"]) | |
+ if "response" in msg["params"]: | |
+ response = msg["params"]["response"] | |
+ if "fromDiskCache" in response and response["fromDiskCache"]: | |
+ request["fromNet"] = False | |
+ if ( | |
+ "fromServiceWorker" in response | |
+ and response["fromServiceWorker"] | |
+ ): | |
+ request["fromNet"] = False | |
+ if "mimeType" in response and response["mimeType"].startswith( | |
+ "video/" | |
+ ): | |
+ request["is_video"] = True | |
+ if ( | |
+ self.main_request is not None | |
+ and request_id == self.main_request | |
+ and "headers" in response | |
+ ): | |
+ self.main_request_headers = response["headers"] | |
+ if ( | |
+ self.main_request is not None | |
+ and request_id == self.main_request | |
+ and "status" in response | |
+ and response["status"] >= 400 | |
+ ): | |
+ self.nav_error_code = response["status"] | |
+ if "statusText" in response and response["statusText"]: | |
+ self.nav_error = response["statusText"] | |
else: | |
- self.nav_error = '{0:d} Navigation error'.format(self.nav_error_code) | |
- logging.debug('Main resource Navigation error: %s', self.nav_error) | |
- elif event == 'dataReceived': | |
+ self.nav_error = "{0:d} Navigation error".format( | |
+ self.nav_error_code | |
+ ) | |
+ logging.debug( | |
+ "Main resource Navigation error: %s", self.nav_error | |
+ ) | |
+ elif event == "dataReceived": | |
self.response_started = True | |
- if 'data' not in request: | |
- request['data'] = [] | |
- request['data'].append(msg['params']) | |
- elif event == 'loadingFinished': | |
+ if "data" not in request: | |
+ request["data"] = [] | |
+ request["data"].append(msg["params"]) | |
+ elif event == "loadingFinished": | |
self.response_started = True | |
- request['finished'] = msg['params'] | |
+ request["finished"] = msg["params"] | |
self.get_response_body(request_id) | |
- elif event == 'loadingFailed': | |
- request['failed'] = msg['params'] | |
+ elif event == "loadingFailed": | |
+ request["failed"] = msg["params"] | |
if not self.response_started: | |
- if 'errorText' in msg['params']: | |
- self.nav_error = msg['params']['errorText'] | |
+ if "errorText" in msg["params"]: | |
+ self.nav_error = msg["params"]["errorText"] | |
else: | |
- self.nav_error = 'Unknown navigation error' | |
+ self.nav_error = "Unknown navigation error" | |
self.nav_error_code = 404 | |
- logging.debug('Navigation error: %s', self.nav_error) | |
- elif self.main_request is not None and \ | |
- request_id == self.main_request and \ | |
- 'errorText' in msg['params'] and \ | |
- 'canceled' in msg['params'] and \ | |
- not msg['params']['canceled']: | |
- self.nav_error = msg['params']['errorText'] | |
+ logging.debug("Navigation error: %s", self.nav_error) | |
+ elif ( | |
+ self.main_request is not None | |
+ and request_id == self.main_request | |
+ and "errorText" in msg["params"] | |
+ and "canceled" in msg["params"] | |
+ and not msg["params"]["canceled"] | |
+ ): | |
+ self.nav_error = msg["params"]["errorText"] | |
self.nav_error_code = 404 | |
- logging.debug('Navigation error: %s', self.nav_error) | |
+ logging.debug("Navigation error: %s", self.nav_error) | |
else: | |
ignore_activity = True | |
- if not self.task['stop_at_onload'] and not ignore_activity: | |
+ if not self.task["stop_at_onload"] and not ignore_activity: | |
self.last_activity = monotonic.monotonic() | |
def process_inspector_event(self, event): | |
"""Process Inspector.* dev tools events""" | |
- if event == 'detached': | |
- self.task['error'] = 'Inspector detached, possibly crashed.' | |
- self.task['page_data']['result'] = 12999 | |
- elif event == 'targetCrashed': | |
- self.task['error'] = 'Browser crashed.' | |
- self.task['page_data']['result'] = 12999 | |
+ if event == "detached": | |
+ self.task["error"] = "Inspector detached, possibly crashed." | |
+ self.task["page_data"]["result"] = 12999 | |
+ elif event == "targetCrashed": | |
+ self.task["error"] = "Browser crashed." | |
+ self.task["page_data"]["result"] = 12999 | |
def process_target_event(self, event, msg): | |
"""Process Target.* dev tools events""" | |
- if event == 'attachedToTarget': | |
- if 'targetInfo' in msg['params'] and 'targetId' in msg['params']['targetInfo']: | |
- target = msg['params']['targetInfo'] | |
- if 'type' in target and target['type'] == 'service_worker': | |
+ if event == "attachedToTarget": | |
+ if ( | |
+ "targetInfo" in msg["params"] | |
+ and "targetId" in msg["params"]["targetInfo"] | |
+ ): | |
+ target = msg["params"]["targetInfo"] | |
+ if "type" in target and target["type"] == "service_worker": | |
self.workers.append(target) | |
if self.recording: | |
- self.send_command('Network.enable', {}, target_id=target['targetId']) | |
+ self.send_command( | |
+ "Network.enable", {}, target_id=target["targetId"] | |
+ ) | |
if self.headers: | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, target_id=target['targetId'], | |
- wait=True) | |
- self.send_command('Runtime.runIfWaitingForDebugger', {}, | |
- target_id=target['targetId']) | |
- if event == 'receivedMessageFromTarget': | |
- if 'message' in msg['params'] and 'targetId' in msg['params']: | |
- logging.debug(msg['params']['message'][:200]) | |
- target_id = msg['params']['targetId'] | |
- target_message = json.loads(msg['params']['message']) | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", | |
+ {"headers": self.headers}, | |
+ target_id=target["targetId"], | |
+ wait=True, | |
+ ) | |
+ self.send_command( | |
+ "Runtime.runIfWaitingForDebugger", {}, target_id=target["targetId"] | |
+ ) | |
+ if event == "receivedMessageFromTarget": | |
+ if "message" in msg["params"] and "targetId" in msg["params"]: | |
+ logging.debug(msg["params"]["message"][:200]) | |
+ target_id = msg["params"]["targetId"] | |
+ target_message = json.loads(msg["params"]["message"]) | |
self.process_message(target_message, target_id=target_id) | |
def log_dev_tools_event(self, msg): | |
"""Log the dev tools events to a file""" | |
- if self.task['log_data']: | |
+ if self.task["log_data"]: | |
if self.dev_tools_file is None: | |
- path = self.path_base + '_devtools.json.gz' | |
- self.dev_tools_file = gzip.open(path, 'wb', 7) | |
+ path = self.path_base + "_devtools.json.gz" | |
+ self.dev_tools_file = gzip.open(path, "wb", 7) | |
self.dev_tools_file.write("[{}") | |
if self.dev_tools_file is not None: | |
self.dev_tools_file.write(",\n") | |
self.dev_tools_file.write(json.dumps(msg)) | |
@@ -1052,27 +1247,27 @@ | |
value = headers[name] | |
else: | |
find = name.lower() | |
for header_name in headers: | |
check = header_name.lower() | |
- if check == find or (check[0] == ':' and check[1:] == find): | |
+ if check == find or (check[0] == ":" and check[1:] == find): | |
value = headers[header_name] | |
break | |
return value | |
def bytes_from_range(self, text, range_info): | |
"""Convert a line/column start and end into a byte count""" | |
byte_count = 0 | |
try: | |
lines = text.splitlines() | |
line_count = len(lines) | |
- start_line = range_info['startLine'] | |
- end_line = range_info['endLine'] | |
+ start_line = range_info["startLine"] | |
+ end_line = range_info["endLine"] | |
if start_line > line_count or end_line > line_count: | |
return 0 | |
- start_column = range_info['startColumn'] | |
- end_column = range_info['endColumn'] | |
+ start_column = range_info["startColumn"] | |
+ end_column = range_info["endColumn"] | |
if start_line == end_line: | |
byte_count = end_column - start_column + 1 | |
else: | |
# count the whole lines between the partial start and end lines | |
if end_line > start_line + 1: | |
@@ -1085,14 +1280,23 @@ | |
return byte_count | |
class DevToolsClient(WebSocketClient): | |
"""DevTools WebSocket client""" | |
- def __init__(self, url, protocols=None, extensions=None, heartbeat_freq=None, | |
- ssl_options=None, headers=None): | |
- WebSocketClient.__init__(self, url, protocols, extensions, heartbeat_freq, | |
- ssl_options, headers) | |
+ | |
+ def __init__( | |
+ self, | |
+ url, | |
+ protocols=None, | |
+ extensions=None, | |
+ heartbeat_freq=None, | |
+ ssl_options=None, | |
+ headers=None, | |
+ ): | |
+ WebSocketClient.__init__( | |
+ self, url, protocols, extensions, heartbeat_freq, ssl_options, headers | |
+ ) | |
self.connected = False | |
self.messages = Queue.Queue() | |
self.trace_file = None | |
self.video_prefix = None | |
self.trace_ts_start = None | |
@@ -1121,26 +1325,40 @@ | |
def received_message(self, raw): | |
"""WebSocket interface - message received""" | |
try: | |
if raw.is_text: | |
- message = raw.data.decode(raw.encoding) if raw.encoding is not None else raw.data | |
+ message = ( | |
+ raw.data.decode(raw.encoding) | |
+ if raw.encoding is not None | |
+ else raw.data | |
+ ) | |
compare = message[:50] | |
- if self.path_base is not None and compare.find('"Tracing.dataCollected') > -1: | |
+ if ( | |
+ self.path_base is not None | |
+ and compare.find('"Tracing.dataCollected') > -1 | |
+ ): | |
now = monotonic.monotonic() | |
msg = json.loads(message) | |
message = None | |
if msg is not None: | |
self.process_trace_event(msg) | |
if self.last_data is None or now - self.last_data >= 1.0: | |
self.last_data = now | |
self.messages.put('{"method":"got_message"}') | |
- logging.debug('Processed %d trace events', self.processed_event_count) | |
+ logging.debug( | |
+ "Processed %d trace events", self.processed_event_count | |
+ ) | |
self.processed_event_count = 0 | |
- elif self.trace_file is not None and compare.find('"Tracing.tracingComplete') > -1: | |
+ elif ( | |
+ self.trace_file is not None | |
+ and compare.find('"Tracing.tracingComplete') > -1 | |
+ ): | |
if self.processed_event_count: | |
- logging.debug('Processed %d trace events', self.processed_event_count) | |
+ logging.debug( | |
+ "Processed %d trace events", self.processed_event_count | |
+ ) | |
self.trace_file.write("\n]}") | |
self.trace_file.close() | |
self.trace_file = None | |
if message is not None: | |
self.messages.put(message) | |
@@ -1158,11 +1376,20 @@ | |
self.messages.task_done() | |
except Exception: | |
pass | |
return message | |
- def start_processing_trace(self, path_base, video_prefix, options, job, task, start_timestamp, keep_timeline): | |
+ def start_processing_trace( | |
+ self, | |
+ path_base, | |
+ video_prefix, | |
+ options, | |
+ job, | |
+ task, | |
+ start_timestamp, | |
+ keep_timeline, | |
+ ): | |
"""Write any trace events to the given file""" | |
self.last_image = None | |
self.trace_ts_start = None | |
if start_timestamp is not None: | |
self.trace_ts_start = int(start_timestamp * 1000000) | |
@@ -1174,13 +1401,16 @@ | |
self.video_viewport = None | |
self.keep_timeline = keep_timeline | |
def stop_processing_trace(self): | |
"""All done""" | |
- if self.pending_image is not None and self.last_image is not None and\ | |
- self.pending_image["image"] != self.last_image["image"]: | |
- with open(self.pending_image["path"], 'wb') as image_file: | |
+ if ( | |
+ self.pending_image is not None | |
+ and self.last_image is not None | |
+ and self.pending_image["image"] != self.last_image["image"] | |
+ ): | |
+ with open(self.pending_image["path"], "wb") as image_file: | |
image_file.write(base64.b64decode(self.pending_image["image"])) | |
self.pending_image = None | |
self.trace_ts_start = None | |
if self.trace_file is not None: | |
self.trace_file.write("\n]}") | |
@@ -1195,85 +1425,110 @@ | |
start = monotonic.monotonic() | |
logging.debug("Post-Processing the trace netlog events") | |
self.trace_parser.post_process_netlog_events() | |
logging.debug("Processing the trace timeline events") | |
self.trace_parser.ProcessTimelineEvents() | |
- self.trace_parser.WriteUserTiming(self.path_base + '_user_timing.json.gz') | |
- self.trace_parser.WriteCPUSlices(self.path_base + '_timeline_cpu.json.gz') | |
- self.trace_parser.WriteScriptTimings(self.path_base + '_script_timing.json.gz') | |
- self.trace_parser.WriteFeatureUsage(self.path_base + '_feature_usage.json.gz') | |
- self.trace_parser.WriteInteractive(self.path_base + '_interactive.json.gz') | |
- self.trace_parser.WriteNetlog(self.path_base + '_netlog_requests.json.gz') | |
- self.trace_parser.WriteV8Stats(self.path_base + '_v8stats.json.gz') | |
+ self.trace_parser.WriteUserTiming(self.path_base + "_user_timing.json.gz") | |
+ self.trace_parser.WriteCPUSlices(self.path_base + "_timeline_cpu.json.gz") | |
+ self.trace_parser.WriteScriptTimings( | |
+ self.path_base + "_script_timing.json.gz" | |
+ ) | |
+ self.trace_parser.WriteFeatureUsage( | |
+ self.path_base + "_feature_usage.json.gz" | |
+ ) | |
+ self.trace_parser.WriteInteractive(self.path_base + "_interactive.json.gz") | |
+ self.trace_parser.WriteNetlog(self.path_base + "_netlog_requests.json.gz") | |
+ self.trace_parser.WriteV8Stats(self.path_base + "_v8stats.json.gz") | |
elapsed = monotonic.monotonic() - start | |
logging.debug("Done processing the trace events: %0.3fs", elapsed) | |
self.trace_parser = None | |
self.path_base = None | |
logging.debug("Trace event counts:") | |
for cat in self.trace_event_counts: | |
- logging.debug(' %s: %s', cat, self.trace_event_counts[cat]) | |
+ logging.debug(" %s: %s", cat, self.trace_event_counts[cat]) | |
self.trace_event_counts = {} | |
def process_trace_event(self, msg): | |
"""Process Tracing.* dev tools events""" | |
- if 'params' in msg and 'value' in msg['params'] and len(msg['params']['value']): | |
+ if "params" in msg and "value" in msg["params"] and len(msg["params"]["value"]): | |
if self.trace_file is None and self.keep_timeline: | |
- self.trace_file = gzip.open(self.path_base + '_trace.json.gz', | |
- 'wb', compresslevel=7) | |
+ self.trace_file = gzip.open( | |
+ self.path_base + "_trace.json.gz", "wb", compresslevel=7 | |
+ ) | |
self.trace_file.write('{"traceEvents":[{}') | |
if self.trace_parser is None: | |
from internal.support.trace_parser import Trace | |
+ | |
self.trace_parser = Trace() | |
# write out the trace events one-per-line but pull out any | |
# devtools screenshots as separate files. | |
- trace_events = msg['params']['value'] | |
- out = '' | |
+ trace_events = msg["params"]["value"] | |
+ out = "" | |
for _, trace_event in enumerate(trace_events): | |
self.processed_event_count += 1 | |
keep_event = self.keep_timeline | |
process_event = True | |
- if self.video_prefix is not None and 'cat' in trace_event and \ | |
- 'name' in trace_event and 'ts' in trace_event: | |
- if self.trace_ts_start is None and \ | |
- (trace_event['name'] == 'navigationStart' or | |
- trace_event['name'] == 'fetchStart') and \ | |
- trace_event['cat'].find('blink.user_timing') > -1: | |
- logging.debug("Trace start detected: %d", trace_event['ts']) | |
- self.trace_ts_start = trace_event['ts'] | |
- if self.trace_ts_start is None and \ | |
- (trace_event['name'] == 'navigationStart' or | |
- trace_event['name'] == 'fetchStart') and \ | |
- trace_event['cat'].find('rail') > -1: | |
- logging.debug("Trace start detected: %d", trace_event['ts']) | |
- self.trace_ts_start = trace_event['ts'] | |
- if trace_event['name'] == 'Screenshot' and \ | |
- trace_event['cat'].find('devtools.screenshot') > -1: | |
+ if ( | |
+ self.video_prefix is not None | |
+ and "cat" in trace_event | |
+ and "name" in trace_event | |
+ and "ts" in trace_event | |
+ ): | |
+ if ( | |
+ self.trace_ts_start is None | |
+ and ( | |
+ trace_event["name"] == "navigationStart" | |
+ or trace_event["name"] == "fetchStart" | |
+ ) | |
+ and trace_event["cat"].find("blink.user_timing") > -1 | |
+ ): | |
+ logging.debug("Trace start detected: %d", trace_event["ts"]) | |
+ self.trace_ts_start = trace_event["ts"] | |
+ if ( | |
+ self.trace_ts_start is None | |
+ and ( | |
+ trace_event["name"] == "navigationStart" | |
+ or trace_event["name"] == "fetchStart" | |
+ ) | |
+ and trace_event["cat"].find("rail") > -1 | |
+ ): | |
+ logging.debug("Trace start detected: %d", trace_event["ts"]) | |
+ self.trace_ts_start = trace_event["ts"] | |
+ if ( | |
+ trace_event["name"] == "Screenshot" | |
+ and trace_event["cat"].find("devtools.screenshot") > -1 | |
+ ): | |
keep_event = False | |
process_event = False | |
self.process_screenshot(trace_event) | |
- if 'cat' in trace_event: | |
- if trace_event['cat'] not in self.trace_event_counts: | |
- self.trace_event_counts[trace_event['cat']] = 0 | |
- self.trace_event_counts[trace_event['cat']] += 1 | |
- if not self.job['keep_netlog'] and trace_event['cat'] == 'netlog': | |
+ if "cat" in trace_event: | |
+ if trace_event["cat"] not in self.trace_event_counts: | |
+ self.trace_event_counts[trace_event["cat"]] = 0 | |
+ self.trace_event_counts[trace_event["cat"]] += 1 | |
+ if not self.job["keep_netlog"] and trace_event["cat"] == "netlog": | |
keep_event = False | |
if process_event and self.trace_parser is not None: | |
self.trace_parser.ProcessTraceEvent(trace_event) | |
if keep_event: | |
out += ",\n" + json.dumps(trace_event) | |
if self.trace_file is not None and len(out): | |
self.trace_file.write(out) | |
def process_screenshot(self, trace_event): | |
"""Process an individual screenshot event""" | |
- if self.trace_ts_start is not None and 'args' in trace_event and \ | |
- 'snapshot' in trace_event['args']: | |
- ms_elapsed = int(round(float(trace_event['ts'] - self.trace_ts_start) / 1000.0)) | |
+ if ( | |
+ self.trace_ts_start is not None | |
+ and "args" in trace_event | |
+ and "snapshot" in trace_event["args"] | |
+ ): | |
+ ms_elapsed = int( | |
+ round(float(trace_event["ts"] - self.trace_ts_start) / 1000.0) | |
+ ) | |
if ms_elapsed >= 0: | |
- img = trace_event['args']['snapshot'] | |
- path = '{0}{1:06d}.jpg'.format(self.video_prefix, ms_elapsed) | |
- logging.debug("Video frame (%f): %s", trace_event['ts'], path) | |
+ img = trace_event["args"]["snapshot"] | |
+ path = "{0}{1:06d}.jpg".format(self.video_prefix, ms_elapsed) | |
+ logging.debug("Video frame (%f): %s", trace_event["ts"], path) | |
# Sample frames at at 100ms intervals for the first 20 seconds, | |
# 500ms for 20-40seconds and 2 second intervals after that | |
min_interval = 100 | |
if ms_elapsed > 40000: | |
min_interval = 2000 | |
@@ -1283,37 +1538,49 @@ | |
if self.last_image is not None: | |
elapsed_interval = ms_elapsed - self.last_image["time"] | |
if elapsed_interval < min_interval: | |
keep_image = False | |
if self.pending_image is not None: | |
- logging.debug("Discarding pending image: %s", | |
- self.pending_image["path"]) | |
- self.pending_image = {"image": str(img), | |
- "time": int(ms_elapsed), | |
- "path": str(path)} | |
+ logging.debug( | |
+ "Discarding pending image: %s", | |
+ self.pending_image["path"], | |
+ ) | |
+ self.pending_image = { | |
+ "image": str(img), | |
+ "time": int(ms_elapsed), | |
+ "path": str(path), | |
+ } | |
if keep_image: | |
is_duplicate = False | |
if self.pending_image is not None: | |
if self.pending_image["image"] == img: | |
is_duplicate = True | |
- elif self.last_image is not None and \ | |
- self.last_image["image"] == img: | |
+ elif ( | |
+ self.last_image is not None and self.last_image["image"] == img | |
+ ): | |
is_duplicate = True | |
if is_duplicate: | |
- logging.debug('Dropping duplicate image: %s', path) | |
+ logging.debug("Dropping duplicate image: %s", path) | |
else: | |
# write both the pending image and the current one if | |
# the interval is double the normal sampling rate | |
- if self.last_image is not None and self.pending_image is not None and \ | |
- self.pending_image["image"] != self.last_image["image"]: | |
+ if ( | |
+ self.last_image is not None | |
+ and self.pending_image is not None | |
+ and self.pending_image["image"] != self.last_image["image"] | |
+ ): | |
elapsed_interval = ms_elapsed - self.last_image["time"] | |
if elapsed_interval > 2 * min_interval: | |
pending = self.pending_image["path"] | |
- with open(pending, 'wb') as image_file: | |
- image_file.write(base64.b64decode(self.pending_image["image"])) | |
+ with open(pending, "wb") as image_file: | |
+ image_file.write( | |
+ base64.b64decode(self.pending_image["image"]) | |
+ ) | |
self.pending_image = None | |
- with open(path, 'wb') as image_file: | |
- self.last_image = {"image": str(img), | |
- "time": int(ms_elapsed), | |
- "path": str(path)} | |
+ with open(path, "wb") as image_file: | |
+ self.last_image = { | |
+ "image": str(img), | |
+ "time": int(ms_elapsed), | |
+ "path": str(path), | |
+ } | |
image_file.write(base64.b64decode(img)) | |
--- internal/traceroute.py 2018-10-25 17:07:28.869579 +0000 | |
+++ internal/traceroute.py 2019-02-06 17:08:28.736881 +0000 | |
@@ -11,10 +11,11 @@ | |
import urlparse | |
class Traceroute(object): | |
"""Traceroute (desktop)""" | |
+ | |
def __init__(self, options, job): | |
self.options = options | |
self.job = job | |
def prepare(self, job, task): | |
@@ -25,143 +26,197 @@ | |
"""Launch the browser""" | |
pass | |
def run_task(self, task): | |
"""Run an individual test""" | |
- if 'url' in self.job: | |
+ if "url" in self.job: | |
results = None | |
- hostname = urlparse.urlparse(self.job['url']).hostname | |
- if platform.system() == 'Windows': | |
+ hostname = urlparse.urlparse(self.job["url"]).hostname | |
+ if platform.system() == "Windows": | |
last_hop, results = self.windows_traceroute(hostname) | |
else: | |
last_hop, results = self.unix_traceroute(hostname) | |
if last_hop > 0 and results is not None and len(results): | |
- out_file = os.path.join(task['dir'], task['prefix']) + '_traceroute.txt.gz' | |
- with gzip.open(out_file, 'wb', 7) as f_out: | |
- f_out.write('Hop,IP,ms,FQDN\n') | |
+ out_file = ( | |
+ os.path.join(task["dir"], task["prefix"]) + "_traceroute.txt.gz" | |
+ ) | |
+ with gzip.open(out_file, "wb", 7) as f_out: | |
+ f_out.write("Hop,IP,ms,FQDN\n") | |
if 0 in results: | |
- f_out.write('-1,{0},0,{1}\n'.format(results[0]['addr'], hostname)) | |
+ f_out.write( | |
+ "-1,{0},0,{1}\n".format(results[0]["addr"], hostname) | |
+ ) | |
else: | |
- f_out.write('-1,,0,{0}\n'.format(hostname)) | |
+ f_out.write("-1,,0,{0}\n".format(hostname)) | |
for hop in xrange(1, last_hop + 1): | |
if hop in results: | |
entry = results[hop] | |
- f_out.write('{0:d},{1},{2},{3}\n'.format(hop, entry['addr'], | |
- entry['ms'], | |
- entry['hostname'])) | |
+ f_out.write( | |
+ "{0:d},{1},{2},{3}\n".format( | |
+ hop, entry["addr"], entry["ms"], entry["hostname"] | |
+ ) | |
+ ) | |
else: | |
- f_out.write('{0:d},,,\n'.format(hop)) | |
+ f_out.write("{0:d},,,\n".format(hop)) | |
def windows_traceroute(self, hostname): | |
"""Run a traceroute on Windows""" | |
ret = {} | |
last_hop = 0 | |
- command = ['tracert', '-h', '30', '-w', '500', hostname] | |
- logging.debug(' '.join(command)) | |
+ command = ["tracert", "-h", "30", "-w", "500", hostname] | |
+ logging.debug(" ".join(command)) | |
out = subprocess.check_output(command) | |
lines = out.splitlines() | |
- dest = re.compile(r'^Tracing route to.*\[([\d\.]+)\]') | |
- timeout = re.compile(r'^\s*(\d+).*Request timed out') | |
- address_only = re.compile(r'^\s*(\d+)\s+' | |
- r'\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+' | |
- r'([\d\.]+)') | |
- with_hostname = re.compile(r'^\s*(\d+)\s+' | |
- r'\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+' | |
- r'([^\s]*)\s+\[([\d\.]+)\]') | |
+ dest = re.compile(r"^Tracing route to.*\[([\d\.]+)\]") | |
+ timeout = re.compile(r"^\s*(\d+).*Request timed out") | |
+ address_only = re.compile( | |
+ r"^\s*(\d+)\s+" | |
+ r"\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+" | |
+ r"([\d\.]+)" | |
+ ) | |
+ with_hostname = re.compile( | |
+ r"^\s*(\d+)\s+" | |
+ r"\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+\<?\*?(\d*)[\sms]+" | |
+ r"([^\s]*)\s+\[([\d\.]+)\]" | |
+ ) | |
for line in lines: | |
logging.debug(line) | |
try: | |
fields = with_hostname.search(line) | |
if fields: | |
hop = int(fields.group(1)) | |
- hop_time = None if not len(fields.group(2)) else int(fields.group(2)) | |
- next_time = None if not len(fields.group(3)) else int(fields.group(3)) | |
- if next_time is not None: | |
- if hop_time is None or next_time < hop_time: | |
- hop_time = next_time | |
- next_time = None if not len(fields.group(4)) else int(fields.group(4)) | |
- if next_time is not None: | |
- if hop_time is None or next_time < hop_time: | |
- hop_time = next_time | |
- report_time = '{0:d}'.format(hop_time) if hop_time is not None else '' | |
- ret[hop] = {'ms': report_time, 'hostname': fields.group(5), | |
- 'addr': fields.group(6)} | |
+ hop_time = ( | |
+ None if not len(fields.group(2)) else int(fields.group(2)) | |
+ ) | |
+ next_time = ( | |
+ None if not len(fields.group(3)) else int(fields.group(3)) | |
+ ) | |
+ if next_time is not None: | |
+ if hop_time is None or next_time < hop_time: | |
+ hop_time = next_time | |
+ next_time = ( | |
+ None if not len(fields.group(4)) else int(fields.group(4)) | |
+ ) | |
+ if next_time is not None: | |
+ if hop_time is None or next_time < hop_time: | |
+ hop_time = next_time | |
+ report_time = ( | |
+ "{0:d}".format(hop_time) if hop_time is not None else "" | |
+ ) | |
+ ret[hop] = { | |
+ "ms": report_time, | |
+ "hostname": fields.group(5), | |
+ "addr": fields.group(6), | |
+ } | |
if hop > last_hop: | |
last_hop = hop | |
else: | |
fields = address_only.search(line) | |
if fields: | |
hop = int(fields.group(1)) | |
- hop_time = None if not len(fields.group(2)) else int(fields.group(2)) | |
- next_time = None if not len(fields.group(3)) else int(fields.group(3)) | |
+ hop_time = ( | |
+ None if not len(fields.group(2)) else int(fields.group(2)) | |
+ ) | |
+ next_time = ( | |
+ None if not len(fields.group(3)) else int(fields.group(3)) | |
+ ) | |
if next_time is not None: | |
if hop_time is None or next_time < hop_time: | |
hop_time = next_time | |
- next_time = None if not len(fields.group(4)) else int(fields.group(4)) | |
+ next_time = ( | |
+ None if not len(fields.group(4)) else int(fields.group(4)) | |
+ ) | |
if next_time is not None: | |
if hop_time is None or next_time < hop_time: | |
hop_time = next_time | |
- report_time = '{0:d}'.format(hop_time) if hop_time is not None else '' | |
- ret[hop] = {'ms': report_time, 'hostname': '', 'addr': fields.group(5)} | |
+ report_time = ( | |
+ "{0:d}".format(hop_time) if hop_time is not None else "" | |
+ ) | |
+ ret[hop] = { | |
+ "ms": report_time, | |
+ "hostname": "", | |
+ "addr": fields.group(5), | |
+ } | |
if hop > last_hop: | |
last_hop = hop | |
else: | |
fields = timeout.search(line) | |
if fields: | |
hop = int(fields.group(1)) | |
- ret[hop] = {'ms': '', 'hostname': '', 'addr': ''} | |
+ ret[hop] = {"ms": "", "hostname": "", "addr": ""} | |
else: | |
fields = dest.search(line) | |
if fields: | |
- ret[0] = {'ms': '', 'hostname': hostname, 'addr': fields.group(1)} | |
+ ret[0] = { | |
+ "ms": "", | |
+ "hostname": hostname, | |
+ "addr": fields.group(1), | |
+ } | |
except Exception: | |
pass | |
return last_hop, ret | |
def unix_traceroute(self, hostname): | |
"""Run a traceroute on a system that supports bsd traceroute""" | |
ret = {} | |
last_hop = 0 | |
ret = {} | |
last_hop = 0 | |
- command = ['traceroute', '-m', '30', '-w', '0.5', hostname] | |
- logging.debug(' '.join(command)) | |
+ command = ["traceroute", "-m", "30", "-w", "0.5", hostname] | |
+ logging.debug(" ".join(command)) | |
out = subprocess.check_output(command) | |
lines = out.splitlines() | |
- dest = re.compile(r'^traceroute to [^\(]+\(([\d\.]+)\)') | |
- timeout = re.compile(r'^\s*(\d+)\s+\*\s+\*\s+\*') | |
- success = re.compile(r'^\s*(\d+)\s+([^\s]+)\s+\(([\d\.]+)\)\s+' | |
- r'\*?([\d\.]*)[\sms]+\*?([\d\.]*)[\sms]+\*?([\d\.]*)[\sms]+') | |
+ dest = re.compile(r"^traceroute to [^\(]+\(([\d\.]+)\)") | |
+ timeout = re.compile(r"^\s*(\d+)\s+\*\s+\*\s+\*") | |
+ success = re.compile( | |
+ r"^\s*(\d+)\s+([^\s]+)\s+\(([\d\.]+)\)\s+" | |
+ r"\*?([\d\.]*)[\sms]+\*?([\d\.]*)[\sms]+\*?([\d\.]*)[\sms]+" | |
+ ) | |
for line in lines: | |
logging.debug(line) | |
try: | |
fields = success.search(line) | |
if fields: | |
hop = int(fields.group(1)) | |
- hop_time = None if not len(fields.group(4)) else float(fields.group(4)) | |
- next_time = None if not len(fields.group(5)) else float(fields.group(5)) | |
- if next_time is not None: | |
- if hop_time is None or next_time < hop_time: | |
- hop_time = next_time | |
- next_time = None if not len(fields.group(6)) else float(fields.group(6)) | |
- if next_time is not None: | |
- if hop_time is None or next_time < hop_time: | |
- hop_time = next_time | |
- report_time = '{0:0.3f}'.format(hop_time) if hop_time is not None else '' | |
- ret[hop] = {'ms': report_time, 'hostname': fields.group(2), | |
- 'addr': fields.group(3)} | |
+ hop_time = ( | |
+ None if not len(fields.group(4)) else float(fields.group(4)) | |
+ ) | |
+ next_time = ( | |
+ None if not len(fields.group(5)) else float(fields.group(5)) | |
+ ) | |
+ if next_time is not None: | |
+ if hop_time is None or next_time < hop_time: | |
+ hop_time = next_time | |
+ next_time = ( | |
+ None if not len(fields.group(6)) else float(fields.group(6)) | |
+ ) | |
+ if next_time is not None: | |
+ if hop_time is None or next_time < hop_time: | |
+ hop_time = next_time | |
+ report_time = ( | |
+ "{0:0.3f}".format(hop_time) if hop_time is not None else "" | |
+ ) | |
+ ret[hop] = { | |
+ "ms": report_time, | |
+ "hostname": fields.group(2), | |
+ "addr": fields.group(3), | |
+ } | |
if hop > last_hop: | |
last_hop = hop | |
else: | |
fields = timeout.search(line) | |
if fields: | |
hop = int(fields.group(1)) | |
- ret[hop] = {'ms': '', 'hostname': '', 'addr': ''} | |
+ ret[hop] = {"ms": "", "hostname": "", "addr": ""} | |
else: | |
fields = dest.search(line) | |
if fields: | |
- ret[0] = {'ms': '', 'hostname': hostname, 'addr': fields.group(1)} | |
+ ret[0] = { | |
+ "ms": "", | |
+ "hostname": hostname, | |
+ "addr": fields.group(1), | |
+ } | |
except Exception: | |
pass | |
return last_hop, ret | |
def run_lighthouse_test(self, task): | |
--- internal/video_processing.py 2018-10-25 17:07:28.870875 +0000 | |
+++ internal/video_processing.py 2019-02-06 17:08:28.883984 +0000 | |
@@ -12,186 +12,237 @@ | |
VIDEO_SIZE = 400 | |
class VideoProcessing(object): | |
"""Interface into Chrome's remote dev tools protocol""" | |
+ | |
def __init__(self, options, job, task): | |
- self.video_path = os.path.join(task['dir'], task['video_subdirectory']) | |
- self.support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
+ self.video_path = os.path.join(task["dir"], task["video_subdirectory"]) | |
+ self.support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
self.options = options | |
self.job = job | |
self.task = task | |
def process(self): | |
"""Post Process the video""" | |
if os.path.isdir(self.video_path): | |
self.cap_frame_count(self.video_path, 50) | |
# Crop the video frames | |
- if not self.options.android and not self.options.iOS and \ | |
- 'mobile' in self.job and self.job['mobile'] and \ | |
- 'crop_pct' in self.task: | |
- crop = '{0:d}%x{1:d}%+0+0'.format(self.task['crop_pct']['width'], | |
- self.task['crop_pct']['height']) | |
- for path in sorted(glob.glob(os.path.join(self.video_path, 'ms_*.jpg'))): | |
+ if ( | |
+ not self.options.android | |
+ and not self.options.iOS | |
+ and "mobile" in self.job | |
+ and self.job["mobile"] | |
+ and "crop_pct" in self.task | |
+ ): | |
+ crop = "{0:d}%x{1:d}%+0+0".format( | |
+ self.task["crop_pct"]["width"], self.task["crop_pct"]["height"] | |
+ ) | |
+ for path in sorted( | |
+ glob.glob(os.path.join(self.video_path, "ms_*.jpg")) | |
+ ): | |
command = '{0} -define jpeg:dct-method=fast -crop {1} "{2}"'.format( | |
- self.job['image_magick']['mogrify'], crop, path) | |
+ self.job["image_magick"]["mogrify"], crop, path | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
# Make the initial screen shot the same size as the video | |
logging.debug("Resizing initial video frame") | |
from PIL import Image | |
- files = sorted(glob.glob(os.path.join(self.video_path, 'ms_*.jpg'))) | |
+ | |
+ files = sorted(glob.glob(os.path.join(self.video_path, "ms_*.jpg"))) | |
count = len(files) | |
width = 0 | |
height = 0 | |
if count > 1: | |
with Image.open(files[1]) as image: | |
width, height = image.size | |
command = '{0} "{1}" -resize {2:d}x{3:d} "{1}"'.format( | |
- self.job['image_magick']['convert'], | |
- files[0], width, height) | |
+ self.job["image_magick"]["convert"], files[0], width, height | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
# Eliminate duplicate frames ignoring 25 pixels across the bottom and | |
# right sides for status and scroll bars | |
crop = None | |
if width > 25 and height > 25: | |
- crop = '{0:d}x{1:d}+0+0'.format(width - 25, height - 25) | |
+ crop = "{0:d}x{1:d}+0+0".format(width - 25, height - 25) | |
logging.debug("Removing duplicate video frames") | |
- files = sorted(glob.glob(os.path.join(self.video_path, 'ms_*.jpg'))) | |
+ files = sorted(glob.glob(os.path.join(self.video_path, "ms_*.jpg"))) | |
count = len(files) | |
if count > 1: | |
baseline = files[0] | |
for index in xrange(1, count): | |
if self.frames_match(baseline, files[index], crop, 1, 0): | |
- logging.debug('Removing similar frame %s', os.path.basename(files[index])) | |
+ logging.debug( | |
+ "Removing similar frame %s", os.path.basename(files[index]) | |
+ ) | |
try: | |
os.remove(files[index]) | |
except Exception: | |
pass | |
else: | |
baseline = files[index] | |
# Compress to the target quality and size | |
- for path in sorted(glob.glob(os.path.join(self.video_path, 'ms_*.jpg'))): | |
+ for path in sorted(glob.glob(os.path.join(self.video_path, "ms_*.jpg"))): | |
thumb_size = VIDEO_SIZE | |
- if 'thumbsize' in self.job: | |
+ if "thumbsize" in self.job: | |
try: | |
- size = int(self.job['thumbsize']) | |
+ size = int(self.job["thumbsize"]) | |
if size > 0 and size <= 2000: | |
thumb_size = size | |
except Exception: | |
pass | |
- command = '{0} -define jpeg:dct-method=fast -resize {1:d}x{1:d} '\ | |
- '-quality {2:d} "{3}"'.format(self.job['image_magick']['mogrify'], | |
- thumb_size, self.job['imageQuality'], path) | |
+ command = ( | |
+ "{0} -define jpeg:dct-method=fast -resize {1:d}x{1:d} " | |
+ '-quality {2:d} "{3}"'.format( | |
+ self.job["image_magick"]["mogrify"], | |
+ thumb_size, | |
+ self.job["imageQuality"], | |
+ path, | |
+ ) | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
# Run visualmetrics against them | |
logging.debug("Processing video frames") | |
- if self.task['current_step'] == 1: | |
- filename = '{0:d}.{1:d}.histograms.json.gz'.format(self.task['run'], | |
- self.task['cached']) | |
+ if self.task["current_step"] == 1: | |
+ filename = "{0:d}.{1:d}.histograms.json.gz".format( | |
+ self.task["run"], self.task["cached"] | |
+ ) | |
else: | |
- filename = '{0:d}.{1:d}.{2:d}.histograms.json.gz'.format(self.task['run'], | |
- self.task['cached'], | |
- self.task['current_step']) | |
- histograms = os.path.join(self.task['dir'], filename) | |
- progress_file = os.path.join(self.task['dir'], self.task['prefix']) + \ | |
- '_visual_progress.json.gz' | |
+ filename = "{0:d}.{1:d}.{2:d}.histograms.json.gz".format( | |
+ self.task["run"], self.task["cached"], self.task["current_step"] | |
+ ) | |
+ histograms = os.path.join(self.task["dir"], filename) | |
+ progress_file = ( | |
+ os.path.join(self.task["dir"], self.task["prefix"]) | |
+ + "_visual_progress.json.gz" | |
+ ) | |
visualmetrics = os.path.join(self.support_path, "visualmetrics.py") | |
- args = ['python', visualmetrics, '-d', self.video_path, | |
- '--histogram', histograms, '--progress', progress_file] | |
- if 'renderVideo' in self.job and self.job['renderVideo']: | |
- video_out = os.path.join(self.task['dir'], self.task['prefix']) + \ | |
- '_rendered_video.mp4' | |
- args.extend(['--render', video_out]) | |
- if 'fullSizeVideo' in self.job and self.job['fullSizeVideo']: | |
- args.append('--full') | |
- if 'thumbsize' in self.job: | |
+ args = [ | |
+ "python", | |
+ visualmetrics, | |
+ "-d", | |
+ self.video_path, | |
+ "--histogram", | |
+ histograms, | |
+ "--progress", | |
+ progress_file, | |
+ ] | |
+ if "renderVideo" in self.job and self.job["renderVideo"]: | |
+ video_out = ( | |
+ os.path.join(self.task["dir"], self.task["prefix"]) | |
+ + "_rendered_video.mp4" | |
+ ) | |
+ args.extend(["--render", video_out]) | |
+ if "fullSizeVideo" in self.job and self.job["fullSizeVideo"]: | |
+ args.append("--full") | |
+ if "thumbsize" in self.job: | |
try: | |
- thumbsize = int(self.job['thumbsize']) | |
+ thumbsize = int(self.job["thumbsize"]) | |
if thumbsize > 0 and thumbsize <= 2000: | |
- args.extend(['--thumbsize', str(thumbsize)]) | |
+ args.extend(["--thumbsize", str(thumbsize)]) | |
except Exception: | |
pass | |
subprocess.call(args) | |
def frames_match(self, image1, image2, crop_region, fuzz_percent, max_differences): | |
"""Compare video frames""" | |
- crop = '' | |
+ crop = "" | |
if crop_region is not None: | |
- crop = '-crop {0} '.format(crop_region) | |
+ crop = "-crop {0} ".format(crop_region) | |
match = False | |
- command = '{0} {1} {2} {3}miff:- | {4} -metric AE -'.format( | |
- self.job['image_magick']['convert'], | |
- image1, image2, crop, | |
- self.job['image_magick']['compare']) | |
+ command = "{0} {1} {2} {3}miff:- | {4} -metric AE -".format( | |
+ self.job["image_magick"]["convert"], | |
+ image1, | |
+ image2, | |
+ crop, | |
+ self.job["image_magick"]["compare"], | |
+ ) | |
if fuzz_percent > 0: | |
- command += ' -fuzz {0:d}%'.format(fuzz_percent) | |
- command += ' null:'.format() | |
+ command += " -fuzz {0:d}%".format(fuzz_percent) | |
+ command += " null:".format() | |
compare = subprocess.Popen(command, stderr=subprocess.PIPE, shell=True) | |
_, err = compare.communicate() | |
- if re.match('^[0-9]+$', err): | |
+ if re.match("^[0-9]+$", err): | |
different_pixels = int(err) | |
if different_pixels <= max_differences: | |
match = True | |
return match | |
def cap_frame_count(self, directory, maxframes): | |
"""Limit the number of video frames using an decay for later times""" | |
- frames = sorted(glob.glob(os.path.join(directory, 'ms_*.jpg'))) | |
+ frames = sorted(glob.glob(os.path.join(directory, "ms_*.jpg"))) | |
frame_count = len(frames) | |
if frame_count > maxframes: | |
# First pass, sample all video frames after the first 5 seconds | |
# at 2fps, keeping the first 40% of the target | |
- logging.debug('Sampling 2fps: Reducing %d frames to target of %d...', | |
- frame_count, maxframes) | |
+ logging.debug( | |
+ "Sampling 2fps: Reducing %d frames to target of %d...", | |
+ frame_count, | |
+ maxframes, | |
+ ) | |
skip_frames = int(maxframes * 0.4) | |
self.sample_frames(frames, 500, 5000, skip_frames) | |
- frames = sorted(glob.glob(os.path.join(directory, 'ms_*.jpg'))) | |
+ frames = sorted(glob.glob(os.path.join(directory, "ms_*.jpg"))) | |
frame_count = len(frames) | |
if frame_count > maxframes: | |
# Second pass, sample all video frames after the first 10 seconds | |
# at 1fps, keeping the first 60% of the target | |
- logging.debug('Sampling 1fps: Reducing %d frames to target of %d...', | |
- frame_count, maxframes) | |
+ logging.debug( | |
+ "Sampling 1fps: Reducing %d frames to target of %d...", | |
+ frame_count, | |
+ maxframes, | |
+ ) | |
skip_frames = int(maxframes * 0.6) | |
self.sample_frames(frames, 1000, 10000, skip_frames) | |
- frames = sorted(glob.glob(os.path.join(directory, 'ms_*.jpg'))) | |
+ frames = sorted(glob.glob(os.path.join(directory, "ms_*.jpg"))) | |
frame_count = len(frames) | |
- logging.debug('%d frames final count with a target max of %d frames...', | |
- frame_count, maxframes) | |
+ logging.debug( | |
+ "%d frames final count with a target max of %d frames...", | |
+ frame_count, | |
+ maxframes, | |
+ ) | |
def sample_frames(self, frames, interval, start_ms, skip_frames): | |
"""Sample frames at a given interval""" | |
frame_count = len(frames) | |
if frame_count > 3: | |
# Always keep the first and last frames, only sample in the middle | |
first_frame = frames[0] | |
first_change = frames[1] | |
last_frame = frames[-1] | |
- match = re.compile(r'ms_(?P<ms>[0-9]+)\.') | |
+ match = re.compile(r"ms_(?P<ms>[0-9]+)\.") | |
matches = re.search(match, first_change) | |
first_change_time = 0 | |
if matches is not None: | |
- first_change_time = int(matches.groupdict().get('ms')) | |
+ first_change_time = int(matches.groupdict().get("ms")) | |
last_bucket = None | |
- logging.debug('Sapling frames in %d ms intervals after %d ms, ' | |
- 'skipping %d frames...', interval, | |
- first_change_time + start_ms, skip_frames) | |
+ logging.debug( | |
+ "Sapling frames in %d ms intervals after %d ms, " | |
+ "skipping %d frames...", | |
+ interval, | |
+ first_change_time + start_ms, | |
+ skip_frames, | |
+ ) | |
frame_count = 0 | |
for frame in frames: | |
matches = re.search(match, frame) | |
if matches is not None: | |
frame_count += 1 | |
- frame_time = int(matches.groupdict().get('ms')) | |
+ frame_time = int(matches.groupdict().get("ms")) | |
frame_bucket = int(math.floor(frame_time / interval)) | |
- if (frame_time > first_change_time + start_ms and | |
- frame_bucket == last_bucket and | |
- frame != first_frame and | |
- frame != first_change and | |
- frame != last_frame and | |
- frame_count > skip_frames): | |
- logging.debug('Removing sampled frame ' + frame) | |
+ if ( | |
+ frame_time > first_change_time + start_ms | |
+ and frame_bucket == last_bucket | |
+ and frame != first_frame | |
+ and frame != first_change | |
+ and frame != last_frame | |
+ and frame_count > skip_frames | |
+ ): | |
+ logging.debug("Removing sampled frame " + frame) | |
os.remove(frame) | |
last_bucket = frame_bucket | |
--- internal/traffic_shaping.py 2019-01-08 01:37:06.395287 +0000 | |
+++ internal/traffic_shaping.py 2019-02-06 17:08:29.247492 +0000 | |
@@ -7,41 +7,48 @@ | |
import platform | |
import re | |
import subprocess | |
import time | |
+ | |
class TrafficShaper(object): | |
"""Main traffic-shaper interface""" | |
+ | |
def __init__(self, options): | |
shaper_name = options.shaper | |
- self.support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
+ self.support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
self.shaper = None | |
if shaper_name is not None: | |
- if shaper_name == 'none': | |
+ if shaper_name == "none": | |
self.shaper = NoShaper() | |
- elif shaper_name[:5] == 'netem': | |
- parts = shaper_name.split(',') | |
+ elif shaper_name[:5] == "netem": | |
+ parts = shaper_name.split(",") | |
if_out = parts[1].strip() if len(parts) > 1 else None | |
if_in = None | |
if options.rndis: | |
- if_in = 'usb0' | |
+ if_in = "usb0" | |
elif options.simplert: | |
- if_in = 'tun0' | |
+ if_in = "tun0" | |
elif options.vpntether: | |
- if_in = 'tun0' | |
- self.shaper = NetEm(options=options, out_interface=if_out, in_interface=if_in) | |
- elif shaper_name[:6] == 'remote': | |
- parts = shaper_name.split(',') | |
+ if_in = "tun0" | |
+ self.shaper = NetEm( | |
+ options=options, out_interface=if_out, in_interface=if_in | |
+ ) | |
+ elif shaper_name[:6] == "remote": | |
+ parts = shaper_name.split(",") | |
if len(parts) == 4: | |
- self.shaper = RemoteDummynet(parts[1].strip(), parts[2].strip(), | |
- parts[3].strip()) | |
+ self.shaper = RemoteDummynet( | |
+ parts[1].strip(), parts[2].strip(), parts[3].strip() | |
+ ) | |
elif options.rndis: | |
self.shaper = NoShaper() | |
else: | |
plat = platform.system() | |
if plat == "Windows": | |
- winver = float(".".join(platform.version().split('.')[:2])) | |
+ winver = float(".".join(platform.version().split(".")[:2])) | |
if winver >= 6.3: | |
self.shaper = WinShaper() | |
else: | |
self.shaper = Dummynet() | |
elif plat == "Linux": | |
@@ -65,50 +72,56 @@ | |
def reset(self): | |
"""Disable traffic-shaping""" | |
ret = False | |
if self.shaper is not None: | |
- logging.debug('Resetting traffic shaping') | |
+ logging.debug("Resetting traffic shaping") | |
ret = self.shaper.reset() | |
return ret | |
def configure(self, job, task): | |
"""Enable traffic-shaping""" | |
ret = False | |
in_bps = 0 | |
- if 'bwIn' in job: | |
- in_bps = int(re.search(r'\d+', str(job['bwIn'])).group()) * 1000 | |
+ if "bwIn" in job: | |
+ in_bps = int(re.search(r"\d+", str(job["bwIn"])).group()) * 1000 | |
out_bps = 0 | |
- if 'bwOut' in job: | |
- out_bps = int(re.search(r'\d+', str(job['bwOut'])).group()) * 1000 | |
+ if "bwOut" in job: | |
+ out_bps = int(re.search(r"\d+", str(job["bwOut"])).group()) * 1000 | |
rtt = 0 | |
- if 'latency' in job: | |
- rtt = int(re.search(r'\d+', str(job['latency'])).group()) | |
- plr = .0 | |
- if 'plr' in job: | |
- plr = float(job['plr']) | |
+ if "latency" in job: | |
+ rtt = int(re.search(r"\d+", str(job["latency"])).group()) | |
+ plr = 0.0 | |
+ if "plr" in job: | |
+ plr = float(job["plr"]) | |
if self.shaper is not None: | |
# If a lighthouse test is running, force the Lighthouse 3G profile: | |
# https://github.com/GoogleChrome/lighthouse/blob/master/docs/throttling.md | |
# 1.6Mbps down, 750Kbps up, 150ms RTT | |
- if task['running_lighthouse'] and not job['lighthouse_throttle']: | |
+ if task["running_lighthouse"] and not job["lighthouse_throttle"]: | |
rtt = 150 | |
in_bps = 1600000 | |
out_bps = 750000 | |
- plr = .0 | |
- logging.debug('Configuring traffic shaping: %d/%d - %d ms, %0.2f%% plr', | |
- in_bps, out_bps, rtt, plr) | |
+ plr = 0.0 | |
+ logging.debug( | |
+ "Configuring traffic shaping: %d/%d - %d ms, %0.2f%% plr", | |
+ in_bps, | |
+ out_bps, | |
+ rtt, | |
+ plr, | |
+ ) | |
ret = self.shaper.configure(in_bps, out_bps, rtt, plr) | |
- job['interface'] = self.shaper.interface | |
+ job["interface"] = self.shaper.interface | |
return ret | |
# | |
# NoShaper | |
# | |
class NoShaper(object): | |
"""Allow resets but fail any explicit shaping""" | |
+ | |
def __init__(self): | |
self.interface = None | |
def install(self): | |
"""Install and configure the traffic-shaper""" | |
@@ -126,238 +139,340 @@ | |
"""Enable traffic-shaping""" | |
if in_bps > 0 or out_bps > 0 or rtt > 0 or plr > 0: | |
return False | |
return True | |
+ | |
# | |
# winshaper | |
# | |
class WinShaper(object): | |
"""Windows 8.1+ traffic-shaper using winshaper""" | |
+ | |
def __init__(self): | |
self.interface = None | |
self.in_buff = 20000000 | |
self.out_buff = 20000000 | |
- self.exe = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- "support", "winshaper", "shaper.exe") | |
+ self.exe = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "winshaper", | |
+ "shaper.exe", | |
+ ) | |
def shaper(self, args): | |
"""Run a shaper command with elevated permissions""" | |
from .os_util import run_elevated | |
- return run_elevated(self.exe, ' '.join(args)) == 0 | |
+ | |
+ return run_elevated(self.exe, " ".join(args)) == 0 | |
def install(self): | |
"""Install and configure the traffic-shaper""" | |
- return self.shaper(['install']) | |
+ return self.shaper(["install"]) | |
def remove(self): | |
"""Uninstall traffic-shaping""" | |
- return self.shaper(['remove']) | |
+ return self.shaper(["remove"]) | |
def reset(self): | |
"""Disable traffic-shaping""" | |
- return self.shaper(['reset']) | |
+ return self.shaper(["reset"]) | |
def configure(self, in_bps, out_bps, rtt, plr): | |
"""Enable traffic-shaping""" | |
- return self.shaper(['set', | |
- 'inbps={0:d}'.format(in_bps), | |
- 'outbps={0:d}'.format(out_bps), | |
- 'rtt={0:d}'.format(rtt), | |
- 'plr={0:.2f}'.format(plr), | |
- 'inbuff={0:d}'.format(self.in_buff), | |
- 'outbuff={0:d}'.format(self.out_buff)]) | |
+ return self.shaper( | |
+ [ | |
+ "set", | |
+ "inbps={0:d}".format(in_bps), | |
+ "outbps={0:d}".format(out_bps), | |
+ "rtt={0:d}".format(rtt), | |
+ "plr={0:.2f}".format(plr), | |
+ "inbuff={0:d}".format(self.in_buff), | |
+ "outbuff={0:d}".format(self.out_buff), | |
+ ] | |
+ ) | |
+ | |
# | |
# Dummynet | |
# | |
class Dummynet(object): | |
"""Dummynet support (windows only currently)""" | |
+ | |
def __init__(self): | |
self.interface = None | |
- self.in_pipe = '1' | |
- self.out_pipe = '2' | |
- self.exe = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- "support", "dummynet") | |
- if platform.machine().endswith('64'): | |
+ self.in_pipe = "1" | |
+ self.out_pipe = "2" | |
+ self.exe = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "dummynet" | |
+ ) | |
+ if platform.machine().endswith("64"): | |
self.exe = os.path.join(self.exe, "x64", "ipfw.exe") | |
else: | |
self.exe = os.path.join(self.exe, "x86", "ipfw.exe") | |
def ipfw(self, args): | |
"""Run a single ipfw command""" | |
from .os_util import run_elevated | |
- cmd = ' '.join(args) | |
- logging.debug('ipfw ' + cmd) | |
+ | |
+ cmd = " ".join(args) | |
+ logging.debug("ipfw " + cmd) | |
return run_elevated(self.exe, cmd) == 0 | |
def install(self): | |
"""Set up the pipes""" | |
- return self.ipfw(['-q', 'flush']) and\ | |
- self.ipfw(['-q', 'pipe', 'flush']) and\ | |
- self.ipfw(['pipe', self.in_pipe, 'config', 'delay', '0ms', 'noerror']) and\ | |
- self.ipfw(['pipe', self.out_pipe, 'config', 'delay', '0ms', 'noerror']) and\ | |
- self.ipfw(['queue', self.in_pipe, 'config', 'pipe', self.in_pipe, 'queue', '100', \ | |
- 'noerror', 'mask', 'dst-port', '0xffff']) and\ | |
- self.ipfw(['queue', self.out_pipe, 'config', 'pipe', self.out_pipe, 'queue', '100', \ | |
- 'noerror', 'mask', 'src-port', '0xffff']) and\ | |
- self.ipfw(['add', 'queue', self.in_pipe, 'ip', 'from', 'any', 'to', 'any', | |
- 'in']) and\ | |
- self.ipfw(['add', 'queue', self.out_pipe, 'ip', 'from', 'any', 'to', 'any', | |
- 'out']) and\ | |
- self.ipfw(['add', '60000', 'allow', 'ip', 'from', 'any', 'to', 'any']) | |
+ return ( | |
+ self.ipfw(["-q", "flush"]) | |
+ and self.ipfw(["-q", "pipe", "flush"]) | |
+ and self.ipfw(["pipe", self.in_pipe, "config", "delay", "0ms", "noerror"]) | |
+ and self.ipfw(["pipe", self.out_pipe, "config", "delay", "0ms", "noerror"]) | |
+ and self.ipfw( | |
+ [ | |
+ "queue", | |
+ self.in_pipe, | |
+ "config", | |
+ "pipe", | |
+ self.in_pipe, | |
+ "queue", | |
+ "100", | |
+ "noerror", | |
+ "mask", | |
+ "dst-port", | |
+ "0xffff", | |
+ ] | |
+ ) | |
+ and self.ipfw( | |
+ [ | |
+ "queue", | |
+ self.out_pipe, | |
+ "config", | |
+ "pipe", | |
+ self.out_pipe, | |
+ "queue", | |
+ "100", | |
+ "noerror", | |
+ "mask", | |
+ "src-port", | |
+ "0xffff", | |
+ ] | |
+ ) | |
+ and self.ipfw( | |
+ ["add", "queue", self.in_pipe, "ip", "from", "any", "to", "any", "in"] | |
+ ) | |
+ and self.ipfw( | |
+ ["add", "queue", self.out_pipe, "ip", "from", "any", "to", "any", "out"] | |
+ ) | |
+ and self.ipfw(["add", "60000", "allow", "ip", "from", "any", "to", "any"]) | |
+ ) | |
def remove(self): | |
"""clear the config""" | |
- return self.ipfw(['-q', 'flush']) and\ | |
- self.ipfw(['-q', 'pipe', 'flush']) | |
+ return self.ipfw(["-q", "flush"]) and self.ipfw(["-q", "pipe", "flush"]) | |
def reset(self): | |
"""Disable traffic-shaping""" | |
- return self.ipfw(['pipe', self.in_pipe, 'config', 'delay', '0ms', 'noerror']) and\ | |
- self.ipfw(['pipe', self.out_pipe, 'config', 'delay', '0ms', 'noerror']) and\ | |
- self.ipfw(['queue', self.in_pipe, 'config', 'pipe', self.in_pipe, 'queue', '100', \ | |
- 'noerror', 'mask', 'dst-port', '0xffff']) and\ | |
- self.ipfw(['queue', self.out_pipe, 'config', 'pipe', self.out_pipe, 'queue', '100', \ | |
- 'noerror', 'mask', 'dst-port', '0xffff']) | |
+ return ( | |
+ self.ipfw(["pipe", self.in_pipe, "config", "delay", "0ms", "noerror"]) | |
+ and self.ipfw(["pipe", self.out_pipe, "config", "delay", "0ms", "noerror"]) | |
+ and self.ipfw( | |
+ [ | |
+ "queue", | |
+ self.in_pipe, | |
+ "config", | |
+ "pipe", | |
+ self.in_pipe, | |
+ "queue", | |
+ "100", | |
+ "noerror", | |
+ "mask", | |
+ "dst-port", | |
+ "0xffff", | |
+ ] | |
+ ) | |
+ and self.ipfw( | |
+ [ | |
+ "queue", | |
+ self.out_pipe, | |
+ "config", | |
+ "pipe", | |
+ self.out_pipe, | |
+ "queue", | |
+ "100", | |
+ "noerror", | |
+ "mask", | |
+ "dst-port", | |
+ "0xffff", | |
+ ] | |
+ ) | |
+ ) | |
def configure(self, in_bps, out_bps, rtt, plr): | |
"""Enable traffic-shaping""" | |
# inbound connection | |
in_kbps = int(in_bps / 1000) | |
in_latency = rtt / 2 | |
if rtt % 2: | |
in_latency += 1 | |
- in_command = ['pipe', self.in_pipe, 'config'] | |
+ in_command = ["pipe", self.in_pipe, "config"] | |
if in_kbps > 0: | |
- in_command.extend(['bw', '{0:d}Kbit/s'.format(in_kbps)]) | |
+ in_command.extend(["bw", "{0:d}Kbit/s".format(in_kbps)]) | |
if in_latency >= 0: | |
- in_command.extend(['delay', '{0:d}ms'.format(in_latency)]) | |
+ in_command.extend(["delay", "{0:d}ms".format(in_latency)]) | |
# outbound connection | |
out_kbps = int(out_bps / 1000) | |
out_latency = rtt / 2 | |
- out_command = ['pipe', self.out_pipe, 'config'] | |
+ out_command = ["pipe", self.out_pipe, "config"] | |
if out_kbps > 0: | |
- out_command.extend(['bw', '{0:d}Kbit/s'.format(out_kbps)]) | |
+ out_command.extend(["bw", "{0:d}Kbit/s".format(out_kbps)]) | |
if out_latency >= 0: | |
- out_command.extend(['delay', '{0:d}ms'.format(out_latency)]) | |
+ out_command.extend(["delay", "{0:d}ms".format(out_latency)]) | |
# Packet loss get applied to the queues | |
plr = plr / 100.0 | |
- in_queue_command = ['queue', self.in_pipe, 'config', 'pipe', self.in_pipe, 'queue', '100'] | |
- out_queue_command = ['queue', self.out_pipe, 'config', 'pipe', self.out_pipe, | |
- 'queue', '100'] | |
+ in_queue_command = [ | |
+ "queue", | |
+ self.in_pipe, | |
+ "config", | |
+ "pipe", | |
+ self.in_pipe, | |
+ "queue", | |
+ "100", | |
+ ] | |
+ out_queue_command = [ | |
+ "queue", | |
+ self.out_pipe, | |
+ "config", | |
+ "pipe", | |
+ self.out_pipe, | |
+ "queue", | |
+ "100", | |
+ ] | |
if plr > 0.0 and plr <= 1.0: | |
- in_queue_command.extend(['plr', '{0:.4f}'.format(plr)]) | |
- out_queue_command.extend(['plr', '{0:.4f}'.format(plr)]) | |
- in_queue_command.extend(['mask', 'dst-port', '0xffff']) | |
- out_queue_command.extend(['mask', 'dst-port', '0xffff']) | |
- | |
- return self.ipfw(in_command) and\ | |
- self.ipfw(out_command) and\ | |
- self.ipfw(in_queue_command) and\ | |
- self.ipfw(out_queue_command) | |
+ in_queue_command.extend(["plr", "{0:.4f}".format(plr)]) | |
+ out_queue_command.extend(["plr", "{0:.4f}".format(plr)]) | |
+ in_queue_command.extend(["mask", "dst-port", "0xffff"]) | |
+ out_queue_command.extend(["mask", "dst-port", "0xffff"]) | |
+ | |
+ return ( | |
+ self.ipfw(in_command) | |
+ and self.ipfw(out_command) | |
+ and self.ipfw(in_queue_command) | |
+ and self.ipfw(out_queue_command) | |
+ ) | |
+ | |
# | |
# MacDummynet - Dummynet through pfctl | |
# | |
class MacDummynet(Dummynet): | |
"""Configure dummynet through pfctl and dnctl""" | |
+ | |
def __init__(self): | |
self.interface = None | |
- self.in_pipe = '1' | |
- self.out_pipe = '2' | |
+ self.in_pipe = "1" | |
+ self.out_pipe = "2" | |
self.token = None | |
def pfctl(self, args): | |
"""Run a single pfctl command""" | |
- cmd = ['sudo', 'pfctl'] | |
+ cmd = ["sudo", "pfctl"] | |
cmd.extend(args) | |
- logging.debug(' '.join(cmd)) | |
+ logging.debug(" ".join(cmd)) | |
return subprocess.call(cmd) == 0 | |
def dnctl(self, args): | |
"""Run a single dummynet command""" | |
- cmd = ['sudo', 'dnctl'] | |
+ cmd = ["sudo", "dnctl"] | |
cmd.extend(args) | |
- logging.debug(' '.join(cmd)) | |
+ logging.debug(" ".join(cmd)) | |
return subprocess.call(cmd) == 0 | |
def install(self): | |
"""Set up the pipes""" | |
- rules_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- "support", "osx", "pfctl.rules") | |
- return self.pfctl(['-E']) and\ | |
- self.dnctl(['-q', 'flush']) and\ | |
- self.dnctl(['-q', 'pipe', 'flush']) and\ | |
- self.dnctl(['pipe', self.in_pipe, 'config', 'delay', '0ms', 'noerror']) and\ | |
- self.dnctl(['pipe', self.out_pipe, 'config', 'delay', '0ms', 'noerror']) and\ | |
- self.pfctl(['-f', rules_file]) | |
- | |
+ rules_file = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "osx", "pfctl.rules" | |
+ ) | |
+ return ( | |
+ self.pfctl(["-E"]) | |
+ and self.dnctl(["-q", "flush"]) | |
+ and self.dnctl(["-q", "pipe", "flush"]) | |
+ and self.dnctl(["pipe", self.in_pipe, "config", "delay", "0ms", "noerror"]) | |
+ and self.dnctl(["pipe", self.out_pipe, "config", "delay", "0ms", "noerror"]) | |
+ and self.pfctl(["-f", rules_file]) | |
+ ) | |
def remove(self): | |
"""clear the config""" | |
- return self.dnctl(['-q', 'flush']) and\ | |
- self.dnctl(['-q', 'pipe', 'flush']) and\ | |
- self.pfctl(['-f', '/etc/pf.conf']) and\ | |
- self.pfctl(['-d']) | |
+ return ( | |
+ self.dnctl(["-q", "flush"]) | |
+ and self.dnctl(["-q", "pipe", "flush"]) | |
+ and self.pfctl(["-f", "/etc/pf.conf"]) | |
+ and self.pfctl(["-d"]) | |
+ ) | |
def reset(self): | |
"""Disable traffic-shaping""" | |
- return self.dnctl(['pipe', self.in_pipe, 'config', 'delay', '0ms', 'noerror']) and\ | |
- self.dnctl(['pipe', self.out_pipe, 'config', 'delay', '0ms', 'noerror']) | |
+ return self.dnctl( | |
+ ["pipe", self.in_pipe, "config", "delay", "0ms", "noerror"] | |
+ ) and self.dnctl(["pipe", self.out_pipe, "config", "delay", "0ms", "noerror"]) | |
def configure(self, in_bps, out_bps, rtt, plr): | |
"""Enable traffic-shaping""" | |
# inbound connection | |
in_kbps = int(in_bps / 1000) | |
in_latency = rtt / 2 | |
if rtt % 2: | |
in_latency += 1 | |
- in_command = ['pipe', self.in_pipe, 'config'] | |
+ in_command = ["pipe", self.in_pipe, "config"] | |
if in_kbps > 0: | |
- in_command.extend(['bw', '{0:d}Kbit/s'.format(in_kbps)]) | |
+ in_command.extend(["bw", "{0:d}Kbit/s".format(in_kbps)]) | |
if in_latency >= 0: | |
- in_command.extend(['delay', '{0:d}ms'.format(in_latency)]) | |
+ in_command.extend(["delay", "{0:d}ms".format(in_latency)]) | |
# outbound connection | |
out_kbps = int(out_bps / 1000) | |
out_latency = rtt / 2 | |
- out_command = ['pipe', self.out_pipe, 'config'] | |
+ out_command = ["pipe", self.out_pipe, "config"] | |
if out_kbps > 0: | |
- out_command.extend(['bw', '{0:d}Kbit/s'.format(out_kbps)]) | |
+ out_command.extend(["bw", "{0:d}Kbit/s".format(out_kbps)]) | |
if out_latency >= 0: | |
- out_command.extend(['delay', '{0:d}ms'.format(out_latency)]) | |
+ out_command.extend(["delay", "{0:d}ms".format(out_latency)]) | |
# Packet loss get applied to the queues | |
plr = plr / 100.0 | |
if plr > 0.0 and plr <= 1.0: | |
- in_command.extend(['plr', '{0:.4f}'.format(plr)]) | |
- out_command.extend(['plr', '{0:.4f}'.format(plr)]) | |
- | |
- return self.dnctl(in_command) and\ | |
- self.dnctl(out_command) | |
+ in_command.extend(["plr", "{0:.4f}".format(plr)]) | |
+ out_command.extend(["plr", "{0:.4f}".format(plr)]) | |
+ | |
+ return self.dnctl(in_command) and self.dnctl(out_command) | |
+ | |
# | |
# RemoteDummynet - Remote PC running dummynet with pre-configured pipes | |
# | |
class RemoteDummynet(Dummynet): | |
"""Allow resets but fail any explicit shaping""" | |
+ | |
def __init__(self, server, in_pipe, out_pipe): | |
Dummynet.__init__(self) | |
self.server = server | |
self.in_pipe = in_pipe | |
self.out_pipe = out_pipe | |
self.use_shell = bool(platform.system() == "Windows") | |
def ipfw(self, args): | |
"""Run a single command on the remote server""" | |
success = False | |
- cmd = ['ssh', '-o', 'StrictHostKeyChecking=no', | |
- 'root@{0}'.format(self.server), 'ipfw ' + ' '.join(args)] | |
- logging.debug(' '.join(cmd)) | |
+ cmd = [ | |
+ "ssh", | |
+ "-o", | |
+ "StrictHostKeyChecking=no", | |
+ "root@{0}".format(self.server), | |
+ "ipfw " + " ".join(args), | |
+ ] | |
+ logging.debug(" ".join(cmd)) | |
count = 0 | |
while not success and count < 30: | |
count += 1 | |
try: | |
subprocess.check_call(cmd, shell=self.use_shell) | |
@@ -366,21 +481,23 @@ | |
time.sleep(0.2) | |
return success | |
def install(self): | |
"""Install and configure the traffic-shaper""" | |
- return self.ipfw(['pipe', 'show', self.in_pipe]) | |
+ return self.ipfw(["pipe", "show", self.in_pipe]) | |
def remove(self): | |
"""Uninstall traffic-shaping""" | |
return True | |
+ | |
# | |
# netem | |
# | |
class NetEm(object): | |
"""Linux traffic-shaper using netem/tc""" | |
+ | |
def __init__(self, options, out_interface=None, in_interface=None): | |
self.interface = out_interface | |
self.in_interface = in_interface | |
self.options = options | |
@@ -389,39 +506,67 @@ | |
ret = False | |
# Figure out the default interface | |
try: | |
if self.interface is None: | |
- out = subprocess.check_output(['route']) | |
+ out = subprocess.check_output(["route"]) | |
routes = out.splitlines() | |
- match = re.compile(r'^([^\s]+)\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+'\ | |
- r'[^\s]+\s+[^\s]+\s+[^\s]+\s+([^\s]+)') | |
+ match = re.compile( | |
+ r"^([^\s]+)\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+" | |
+ r"[^\s]+\s+[^\s]+\s+[^\s]+\s+([^\s]+)" | |
+ ) | |
for route in routes: | |
fields = re.search(match, route) | |
if fields: | |
destination = fields.group(1) | |
- if destination == 'default': | |
+ if destination == "default": | |
self.interface = fields.group(2) | |
logging.debug("Default interface: %s", self.interface) | |
break | |
if self.interface: | |
if self.in_interface is None: | |
- self.in_interface = 'ifb0' | |
+ self.in_interface = "ifb0" | |
# Set up the ifb interface so inbound traffic can be shaped | |
- if self.in_interface.startswith('ifb'): | |
+ if self.in_interface.startswith("ifb"): | |
if self.options.dockerized: | |
- subprocess.call(['sudo', 'ip', 'link', 'add', 'ifb0', 'type', 'ifb']) | |
+ subprocess.call( | |
+ ["sudo", "ip", "link", "add", "ifb0", "type", "ifb"] | |
+ ) | |
else: | |
- subprocess.call(['sudo', 'modprobe', 'ifb']) | |
- subprocess.call(['sudo', 'ip', 'link', 'set', 'dev', 'ifb0', 'up']) | |
- subprocess.call(['sudo', 'tc', 'qdisc', 'add', 'dev', self.interface, | |
- 'ingress']) | |
- subprocess.call(['sudo', 'tc', 'filter', 'add', 'dev', self.interface, 'parent', | |
- 'ffff:', 'protocol', 'ip', 'u32', 'match', 'u32', '0', '0', | |
- 'flowid', '1:1', 'action', 'mirred', 'egress', 'redirect', | |
- 'dev', 'ifb0']) | |
+ subprocess.call(["sudo", "modprobe", "ifb"]) | |
+ subprocess.call(["sudo", "ip", "link", "set", "dev", "ifb0", "up"]) | |
+ subprocess.call( | |
+ ["sudo", "tc", "qdisc", "add", "dev", self.interface, "ingress"] | |
+ ) | |
+ subprocess.call( | |
+ [ | |
+ "sudo", | |
+ "tc", | |
+ "filter", | |
+ "add", | |
+ "dev", | |
+ self.interface, | |
+ "parent", | |
+ "ffff:", | |
+ "protocol", | |
+ "ip", | |
+ "u32", | |
+ "match", | |
+ "u32", | |
+ "0", | |
+ "0", | |
+ "flowid", | |
+ "1:1", | |
+ "action", | |
+ "mirred", | |
+ "egress", | |
+ "redirect", | |
+ "dev", | |
+ "ifb0", | |
+ ] | |
+ ) | |
self.reset() | |
ret = True | |
else: | |
logging.critical("Unable to identify default interface using 'route'") | |
except Exception as err: | |
@@ -429,23 +574,30 @@ | |
return ret | |
def remove(self): | |
"""Uninstall traffic-shaping""" | |
if self.interface: | |
- subprocess.call(['sudo', 'tc', 'qdisc', 'del', 'dev', self.interface, | |
- 'ingress']) | |
- subprocess.call(['sudo', 'ip', 'link', 'set', 'dev', 'ifb0', 'down']) | |
+ subprocess.call( | |
+ ["sudo", "tc", "qdisc", "del", "dev", self.interface, "ingress"] | |
+ ) | |
+ subprocess.call(["sudo", "ip", "link", "set", "dev", "ifb0", "down"]) | |
return True | |
def reset(self): | |
"""Disable traffic-shaping""" | |
ret = False | |
if self.interface is not None and self.in_interface is not None: | |
- ret = subprocess.call(['sudo', 'tc', 'qdisc', 'del', 'dev', self.in_interface, | |
- 'root']) == 0 and\ | |
- subprocess.call(['sudo', 'tc', 'qdisc', 'del', 'dev', self.interface, | |
- 'root']) == 0 | |
+ ret = ( | |
+ subprocess.call( | |
+ ["sudo", "tc", "qdisc", "del", "dev", self.in_interface, "root"] | |
+ ) | |
+ == 0 | |
+ and subprocess.call( | |
+ ["sudo", "tc", "qdisc", "del", "dev", self.interface, "root"] | |
+ ) | |
+ == 0 | |
+ ) | |
return ret | |
def configure(self, in_bps, out_bps, rtt, plr): | |
"""Enable traffic-shaping""" | |
ret = False | |
@@ -458,16 +610,26 @@ | |
return ret | |
def configure_interface(self, interface, bps, latency, plr): | |
"""Configure traffic-shaping for a single interface""" | |
ret = False | |
- args = ['sudo', 'tc', 'qdisc', 'add', 'dev', interface, 'root', | |
- 'netem', 'delay', '{0:d}ms'.format(latency)] | |
+ args = [ | |
+ "sudo", | |
+ "tc", | |
+ "qdisc", | |
+ "add", | |
+ "dev", | |
+ interface, | |
+ "root", | |
+ "netem", | |
+ "delay", | |
+ "{0:d}ms".format(latency), | |
+ ] | |
if bps > 0: | |
kbps = int(bps / 1000) | |
- args.extend(['rate', '{0:d}kbit'.format(kbps)]) | |
+ args.extend(["rate", "{0:d}kbit".format(kbps)]) | |
if plr > 0: | |
- args.extend(['loss', '{0:.2f}%'.format(plr)]) | |
- logging.debug(' '.join(args)) | |
+ args.extend(["loss", "{0:.2f}%".format(plr)]) | |
+ logging.debug(" ".join(args)) | |
ret = subprocess.call(args) == 0 | |
return ret | |
--- ws4py/__init__.py 2018-09-21 20:15:22.160801 +0000 | |
+++ ws4py/__init__.py 2019-02-06 17:08:29.324170 +0000 | |
@@ -29,17 +29,18 @@ | |
import logging | |
import logging.handlers as handlers | |
__author__ = "Sylvain Hellegouarch" | |
__version__ = "0.4.2.dev0" | |
-__all__ = ['WS_KEY', 'WS_VERSION', 'configure_logger', 'format_addresses'] | |
+__all__ = ["WS_KEY", "WS_VERSION", "configure_logger", "format_addresses"] | |
WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" | |
WS_VERSION = (8, 13) | |
+ | |
def configure_logger(stdout=True, filepath=None, level=logging.INFO): | |
- logger = logging.getLogger('ws4py') | |
+ logger = logging.getLogger("ws4py") | |
logger.setLevel(level) | |
logfmt = logging.Formatter("[%(asctime)s] %(levelname)s %(message)s") | |
if filepath: | |
h = handlers.RotatingFileHandler(filepath, maxBytes=10485760, backupCount=3) | |
@@ -47,22 +48,29 @@ | |
h.setFormatter(logfmt) | |
logger.addHandler(h) | |
if stdout: | |
import sys | |
+ | |
h = logging.StreamHandler(sys.stdout) | |
h.setLevel(level) | |
h.setFormatter(logfmt) | |
logger.addHandler(h) | |
return logger | |
+ | |
def format_addresses(ws): | |
me = ws.local_address | |
peer = ws.peer_address | |
if isinstance(me, tuple) and isinstance(peer, tuple): | |
me_ip, me_port = ws.local_address | |
peer_ip, peer_port = ws.peer_address | |
- return "[Local => %s:%d | Remote => %s:%d]" % (me_ip, me_port, peer_ip, peer_port) | |
+ return "[Local => %s:%d | Remote => %s:%d]" % ( | |
+ me_ip, | |
+ me_port, | |
+ peer_ip, | |
+ peer_port, | |
+ ) | |
return "[Bound to '%s']" % me | |
--- internal/microsoft_edge.py 2019-02-06 16:55:03.352805 +0000 | |
+++ internal/microsoft_edge.py 2019-02-06 17:08:29.478948 +0000 | |
@@ -15,12 +15,14 @@ | |
import monotonic | |
import ujson as json | |
from .desktop_browser import DesktopBrowser | |
from .optimization_checks import OptimizationChecks | |
+ | |
class Edge(DesktopBrowser): | |
"""Microsoft Edge""" | |
+ | |
def __init__(self, path, options, job): | |
DesktopBrowser.__init__(self, path, options, job) | |
self.job = job | |
self.task = None | |
self.options = options | |
@@ -34,13 +36,18 @@ | |
self.extension_loaded = False | |
self.navigating = False | |
self.page = {} | |
self.requests = {} | |
self.last_activity = None | |
- self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'js') | |
- self.wpt_etw_done = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'edge', 'wpt-etw', 'wpt-etw.done') | |
+ self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "js") | |
+ self.wpt_etw_done = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "edge", | |
+ "wpt-etw", | |
+ "wpt-etw.done", | |
+ ) | |
self.wpt_etw_proc = None | |
self.dns = {} | |
self.sockets = {} | |
self.socket_ports = {} | |
self.requests = {} | |
@@ -48,11 +55,11 @@ | |
self.CMarkup = [] | |
self.start = None | |
self.bodies_path = None | |
self.pid = None | |
self.supports_interactive = True | |
- self.start_page = 'http://127.0.0.1:8888/config.html' | |
+ self.start_page = "http://127.0.0.1:8888/config.html" | |
self.edge_registry_path = r"SOFTWARE\Classes\Local Settings\Software\Microsoft\Windows\CurrentVersion\AppContainer\Storage\microsoft.microsoftedge_8wekyb3d8bbwe\MicrosoftEdge\Privacy" | |
self.edge_registry_key_value = 0 | |
def reset(self): | |
"""Reset the ETW tracking""" | |
@@ -65,122 +72,153 @@ | |
def prepare(self, job, task): | |
"""Prepare the profile/OS for the browser""" | |
self.kill() | |
self.page = {} | |
self.requests = {} | |
- self.bodies_path = os.path.join(task['dir'], 'bodies') | |
+ self.bodies_path = os.path.join(task["dir"], "bodies") | |
if not os.path.isdir(self.bodies_path): | |
os.makedirs(self.bodies_path) | |
try: | |
import _winreg | |
- registry_key = _winreg.CreateKeyEx(_winreg.HKEY_CURRENT_USER, self.edge_registry_path, 0, _winreg.KEY_READ | _winreg.KEY_WRITE) | |
- self.edge_registry_key_value = _winreg.QueryValueEx(registry_key, "ClearBrowsingHistoryOnExit")[0] | |
- if not task['cached']: | |
+ | |
+ registry_key = _winreg.CreateKeyEx( | |
+ _winreg.HKEY_CURRENT_USER, | |
+ self.edge_registry_path, | |
+ 0, | |
+ _winreg.KEY_READ | _winreg.KEY_WRITE, | |
+ ) | |
+ self.edge_registry_key_value = _winreg.QueryValueEx( | |
+ registry_key, "ClearBrowsingHistoryOnExit" | |
+ )[0] | |
+ if not task["cached"]: | |
self.clear_cache() | |
- if task['cached'] or job['fvonly']: | |
- _winreg.SetValueEx(registry_key, "ClearBrowsingHistoryOnExit", 0, _winreg.REG_DWORD, 1) | |
+ if task["cached"] or job["fvonly"]: | |
+ _winreg.SetValueEx( | |
+ registry_key, "ClearBrowsingHistoryOnExit", 0, _winreg.REG_DWORD, 1 | |
+ ) | |
_winreg.CloseKey(registry_key) | |
else: | |
- _winreg.SetValueEx(registry_key, "ClearBrowsingHistoryOnExit", 0, _winreg.REG_DWORD, 0) | |
+ _winreg.SetValueEx( | |
+ registry_key, "ClearBrowsingHistoryOnExit", 0, _winreg.REG_DWORD, 0 | |
+ ) | |
_winreg.CloseKey(registry_key) | |
except Exception as err: | |
logging.exception("Error clearing cache: %s", str(err)) | |
DesktopBrowser.prepare(self, job, task) | |
# Prepare the config for the extension to query | |
- if self.job['message_server'] is not None: | |
+ if self.job["message_server"] is not None: | |
config = None | |
- names = ['block', | |
- 'block_domains', | |
- 'block_domains_except', | |
- 'headers', | |
- 'cookies', | |
- 'overrideHosts'] | |
+ names = [ | |
+ "block", | |
+ "block_domains", | |
+ "block_domains_except", | |
+ "headers", | |
+ "cookies", | |
+ "overrideHosts", | |
+ ] | |
for name in names: | |
if name in task and task[name]: | |
if config is None: | |
config = {} | |
config[name] = task[name] | |
- self.job['message_server'].config = config | |
+ self.job["message_server"].config = config | |
def get_driver(self, task): | |
"""Get the webdriver instance""" | |
from selenium import webdriver | |
from .os_util import run_elevated | |
- path = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'edge') | |
- reg_file = os.path.join(path, 'keys.reg') | |
+ | |
+ path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "edge" | |
+ ) | |
+ reg_file = os.path.join(path, "keys.reg") | |
if os.path.isfile(reg_file): | |
- run_elevated('reg', 'IMPORT "{0}"'.format(reg_file)) | |
+ run_elevated("reg", 'IMPORT "{0}"'.format(reg_file)) | |
capabilities = webdriver.DesiredCapabilities.EDGE.copy() | |
- extension_src = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'edge', 'extension') | |
- extension_dir = os.path.join(os.environ.get('LOCALAPPDATA'), 'Packages', | |
- 'Microsoft.MicrosoftEdge_8wekyb3d8bbwe', | |
- 'LocalState', 'wptagent') | |
+ extension_src = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "edge", "extension" | |
+ ) | |
+ extension_dir = os.path.join( | |
+ os.environ.get("LOCALAPPDATA"), | |
+ "Packages", | |
+ "Microsoft.MicrosoftEdge_8wekyb3d8bbwe", | |
+ "LocalState", | |
+ "wptagent", | |
+ ) | |
if not os.path.isdir(extension_dir): | |
os.makedirs(extension_dir) | |
files = os.listdir(extension_src) | |
for file_name in files: | |
try: | |
src = os.path.join(extension_src, file_name) | |
if os.path.isfile(src): | |
shutil.copy(src, extension_dir) | |
except Exception: | |
pass | |
- capabilities['extensionPaths'] = [extension_dir] | |
- capabilities['ms:extensionPaths'] = [extension_dir] | |
+ capabilities["extensionPaths"] = [extension_dir] | |
+ capabilities["ms:extensionPaths"] = [extension_dir] | |
driver = webdriver.Edge(executable_path=self.path, capabilities=capabilities) | |
return driver | |
def launch(self, job, task): | |
"""Launch the browser""" | |
- if self.job['message_server'] is not None: | |
- self.job['message_server'].flush_messages() | |
+ if self.job["message_server"] is not None: | |
+ self.job["message_server"].flush_messages() | |
try: | |
- logging.debug('Launching browser : %s', self.path) | |
+ logging.debug("Launching browser : %s", self.path) | |
self.driver = self.get_driver(task) | |
- self.driver.set_page_load_timeout(task['time_limit']) | |
- if 'browserVersion' in self.driver.capabilities: | |
- self.browser_version = self.driver.capabilities['browserVersion'] | |
- elif 'version' in self.driver.capabilities: | |
- self.browser_version = self.driver.capabilities['version'] | |
+ self.driver.set_page_load_timeout(task["time_limit"]) | |
+ if "browserVersion" in self.driver.capabilities: | |
+ self.browser_version = self.driver.capabilities["browserVersion"] | |
+ elif "version" in self.driver.capabilities: | |
+ self.browser_version = self.driver.capabilities["version"] | |
DesktopBrowser.wait_for_idle(self) | |
self.driver.get(self.start_page) | |
- logging.debug('Resizing browser to %dx%d', task['width'], task['height']) | |
+ logging.debug("Resizing browser to %dx%d", task["width"], task["height"]) | |
self.driver.set_window_position(0, 0) | |
- self.driver.set_window_size(task['width'], task['height']) | |
+ self.driver.set_window_size(task["width"], task["height"]) | |
# Start the relay agent to capture ETW events | |
- wpt_etw_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'edge', 'wpt-etw', 'wpt-etw.exe') | |
+ wpt_etw_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "edge", | |
+ "wpt-etw", | |
+ "wpt-etw.exe", | |
+ ) | |
if os.path.isfile(self.wpt_etw_done): | |
try: | |
os.remove(self.wpt_etw_done) | |
except Exception: | |
pass | |
from .os_util import run_elevated | |
- self.wpt_etw_proc = run_elevated(wpt_etw_path, | |
- '--bodies "{0}"'.format(self.bodies_path), | |
- wait=False) | |
+ | |
+ self.wpt_etw_proc = run_elevated( | |
+ wpt_etw_path, '--bodies "{0}"'.format(self.bodies_path), wait=False | |
+ ) | |
self.wait_for_extension() | |
if self.extension_loaded: | |
# Figure out the native viewport size | |
- size = self.execute_js("return [window.innerWidth, window.innerHeight];") | |
+ size = self.execute_js( | |
+ "return [window.innerWidth, window.innerHeight];" | |
+ ) | |
if size is not None and len(size) == 2: | |
- task['actual_viewport'] = {"width": size[0], "height": size[1]} | |
- if 'adjust_viewport' in job and job['adjust_viewport']: | |
- delta_x = max(task['width'] - size[0], 0) | |
- delta_y = max(task['height'] - size[1], 0) | |
+ task["actual_viewport"] = {"width": size[0], "height": size[1]} | |
+ if "adjust_viewport" in job and job["adjust_viewport"]: | |
+ delta_x = max(task["width"] - size[0], 0) | |
+ delta_y = max(task["height"] - size[1], 0) | |
if delta_x or delta_y: | |
- width = task['width'] + delta_x | |
- height = task['height'] + delta_y | |
- logging.debug('Resizing browser to %dx%d', width, height) | |
+ width = task["width"] + delta_x | |
+ height = task["height"] + delta_y | |
+ logging.debug("Resizing browser to %dx%d", width, height) | |
self.driver.set_window_size(width, height) | |
DesktopBrowser.wait_for_idle(self) | |
else: | |
- task['error'] = 'Error waiting for wpt-etw to start. Make sure .net is installed' | |
+ task[ | |
+ "error" | |
+ ] = "Error waiting for wpt-etw to start. Make sure .net is installed" | |
except Exception as err: | |
- task['error'] = 'Error starting browser: {0}'.format(err.__str__()) | |
+ task["error"] = "Error starting browser: {0}".format(err.__str__()) | |
def stop(self, job, task): | |
"""Kill the browser""" | |
logging.debug("Stopping the browser...") | |
if self.driver is not None: | |
@@ -189,62 +227,82 @@ | |
except Exception: | |
pass | |
self.driver = None | |
DesktopBrowser.stop(self, job, task) | |
if self.wpt_etw_proc is not None: | |
- with open(self.wpt_etw_done, 'a'): | |
+ with open(self.wpt_etw_done, "a"): | |
os.utime(self.wpt_etw_done, None) | |
from .os_util import wait_for_elevated_process | |
+ | |
wait_for_elevated_process(self.wpt_etw_proc) | |
self.wpt_etw_proc = None | |
if os.path.isfile(self.wpt_etw_done): | |
try: | |
os.remove(self.wpt_etw_done) | |
except Exception: | |
pass | |
try: | |
import _winreg | |
- registry_key = _winreg.CreateKeyEx(_winreg.HKEY_CURRENT_USER, self.edge_registry_path, 0, _winreg.KEY_WRITE) | |
- _winreg.SetValueEx(registry_key, "ClearBrowsingHistoryOnExit", 0, _winreg.REG_DWORD, self.edge_registry_key_value) | |
- _winreg.CloseKey(registry_key) | |
+ | |
+ registry_key = _winreg.CreateKeyEx( | |
+ _winreg.HKEY_CURRENT_USER, self.edge_registry_path, 0, _winreg.KEY_WRITE | |
+ ) | |
+ _winreg.SetValueEx( | |
+ registry_key, | |
+ "ClearBrowsingHistoryOnExit", | |
+ 0, | |
+ _winreg.REG_DWORD, | |
+ self.edge_registry_key_value, | |
+ ) | |
+ _winreg.CloseKey(registry_key) | |
except Exception as err: | |
- logging.exception("Error resetting Edge cache settings: %s", str(err)) | |
+ logging.exception("Error resetting Edge cache settings: %s", str(err)) | |
self.kill() | |
if self.bodies_path is not None and os.path.isdir(self.bodies_path): | |
shutil.rmtree(self.bodies_path, ignore_errors=True) | |
def kill(self): | |
"""Kill any running instances""" | |
from .os_util import run_elevated | |
- processes = ['MicrosoftEdge.exe', 'MicrosoftEdgeCP.exe', 'plugin-container.exe', | |
- 'browser_broker.exe', 'smartscreen.exe', 'dllhost.exe'] | |
+ | |
+ processes = [ | |
+ "MicrosoftEdge.exe", | |
+ "MicrosoftEdgeCP.exe", | |
+ "plugin-container.exe", | |
+ "browser_broker.exe", | |
+ "smartscreen.exe", | |
+ "dllhost.exe", | |
+ ] | |
for exe in processes: | |
try: | |
- run_elevated('taskkill', '/F /T /IM {0}'.format(exe)) | |
+ run_elevated("taskkill", "/F /T /IM {0}".format(exe)) | |
except Exception: | |
pass | |
def clear_cache(self): | |
"""Clear the browser cache""" | |
- appdata = os.environ.get('LOCALAPPDATA') | |
- edge_dir = os.path.join(appdata, 'Packages', 'Microsoft.MicrosoftEdge_8wekyb3d8bbwe') | |
- temp_dir = os.path.join(edge_dir, 'AC') | |
+ appdata = os.environ.get("LOCALAPPDATA") | |
+ edge_dir = os.path.join( | |
+ appdata, "Packages", "Microsoft.MicrosoftEdge_8wekyb3d8bbwe" | |
+ ) | |
+ temp_dir = os.path.join(edge_dir, "AC") | |
if os.path.exists(temp_dir): | |
for directory in os.listdir(temp_dir): | |
- if directory.startswith('#!'): | |
+ if directory.startswith("#!"): | |
try: | |
- shutil.rmtree(os.path.join(temp_dir, directory), | |
- ignore_errors=True) | |
+ shutil.rmtree( | |
+ os.path.join(temp_dir, directory), ignore_errors=True | |
+ ) | |
except Exception: | |
pass | |
- cookie_dir = os.path.join(temp_dir, 'MicrosoftEdge', 'Cookies') | |
+ cookie_dir = os.path.join(temp_dir, "MicrosoftEdge", "Cookies") | |
if os.path.exists(cookie_dir): | |
try: | |
shutil.rmtree(cookie_dir, ignore_errors=True) | |
except Exception: | |
pass | |
- app_dir = os.path.join(edge_dir, 'AppData') | |
+ app_dir = os.path.join(edge_dir, "AppData") | |
if os.path.exists(app_dir): | |
try: | |
shutil.rmtree(app_dir, ignore_errors=True) | |
except Exception: | |
pass | |
@@ -256,390 +314,503 @@ | |
def run_task(self, task): | |
"""Run an individual test""" | |
if self.driver is not None and self.extension_loaded: | |
self.task = task | |
logging.debug("Running test") | |
- end_time = monotonic.monotonic() + task['test_time_limit'] | |
- task['current_step'] = 1 | |
+ end_time = monotonic.monotonic() + task["test_time_limit"] | |
+ task["current_step"] = 1 | |
recording = False | |
- while len(task['script']) and task['error'] is None and \ | |
- monotonic.monotonic() < end_time: | |
+ while ( | |
+ len(task["script"]) | |
+ and task["error"] is None | |
+ and monotonic.monotonic() < end_time | |
+ ): | |
self.prepare_task(task) | |
- command = task['script'].pop(0) | |
- if not recording and command['record']: | |
+ command = task["script"].pop(0) | |
+ if not recording and command["record"]: | |
recording = True | |
self.on_start_recording(task) | |
try: | |
self.process_command(command) | |
except Exception: | |
logging.exception("Exception running task") | |
- if command['record']: | |
+ if command["record"]: | |
self.wait_for_page_load() | |
- if not task['combine_steps'] or not len(task['script']): | |
+ if not task["combine_steps"] or not len(task["script"]): | |
self.on_stop_capture(task) | |
self.on_stop_recording(task) | |
recording = False | |
self.on_start_processing(task) | |
self.wait_for_processing(task) | |
self.step_complete(task) | |
- if task['log_data']: | |
+ if task["log_data"]: | |
# Move on to the next step | |
- task['current_step'] += 1 | |
+ task["current_step"] += 1 | |
self.event_name = None | |
- task['navigated'] = True | |
+ task["navigated"] = True | |
# Always navigate to about:blank after finishing in case the tab is | |
# remembered across sessions | |
try: | |
- self.driver.get('about:blank') | |
+ self.driver.get("about:blank") | |
except Exception: | |
- logging.debug('Webdriver exception navigating to about:blank after the test') | |
+ logging.debug( | |
+ "Webdriver exception navigating to about:blank after the test" | |
+ ) | |
self.task = None | |
def wait_for_extension(self): | |
"""Wait for the extension to send the started message""" | |
- if self.job['message_server'] is not None: | |
- end_time = monotonic.monotonic() + 30 | |
+ if self.job["message_server"] is not None: | |
+ end_time = monotonic.monotonic() + 30 | |
while monotonic.monotonic() < end_time: | |
try: | |
- message = self.job['message_server'].get_message(1) | |
+ message = self.job["message_server"].get_message(1) | |
logging.debug(message) | |
- logging.debug('Extension started') | |
+ logging.debug("Extension started") | |
self.extension_loaded = True | |
break | |
except Exception: | |
pass | |
def wait_for_page_load(self): | |
"""Wait for the onload event from the extension""" | |
- if self.job['message_server'] is not None: | |
+ if self.job["message_server"] is not None: | |
logging.debug("Waiting for page load...") | |
start_time = monotonic.monotonic() | |
- end_time = start_time + self.task['time_limit'] | |
+ end_time = start_time + self.task["time_limit"] | |
done = False | |
self.last_activity = None | |
while not done: | |
try: | |
- self.process_message(self.job['message_server'].get_message(1)) | |
+ self.process_message(self.job["message_server"].get_message(1)) | |
except Exception: | |
pass | |
now = monotonic.monotonic() | |
elapsed_test = now - start_time | |
if self.nav_error is not None: | |
done = True | |
if self.page_loaded is None: | |
- self.task['error'] = self.nav_error | |
- self.task['page_data']['result'] = 12999 | |
+ self.task["error"] = self.nav_error | |
+ self.task["page_data"]["result"] = 12999 | |
logging.debug("Page load navigation error: %s", self.nav_error) | |
elif now >= end_time: | |
done = True | |
logging.debug("Page load reached time limit") | |
# only consider it an error if we didn't get a page load event | |
if self.page_loaded is None: | |
- self.task['error'] = "Page Load Timeout" | |
- self.task['page_data']['result'] = 99998 | |
- elif self.last_activity is not None and \ | |
- ('time' not in self.job or elapsed_test > self.job['time']): | |
+ self.task["error"] = "Page Load Timeout" | |
+ self.task["page_data"]["result"] = 99998 | |
+ elif self.last_activity is not None and ( | |
+ "time" not in self.job or elapsed_test > self.job["time"] | |
+ ): | |
elapsed_activity = now - self.last_activity | |
- elapsed_page_load = now - self.page_loaded if self.page_loaded else 0 | |
- if elapsed_page_load >= 1 and elapsed_activity >= self.task['activity_time']: | |
+ elapsed_page_load = ( | |
+ now - self.page_loaded if self.page_loaded else 0 | |
+ ) | |
+ if ( | |
+ elapsed_page_load >= 1 | |
+ and elapsed_activity >= self.task["activity_time"] | |
+ ): | |
logging.debug("Page Load Activity Time Finished") | |
done = True | |
- elif self.task['error'] is not None: | |
- logging.debug("Page load error: %s", self.task['error']) | |
+ elif self.task["error"] is not None: | |
+ logging.debug("Page load error: %s", self.task["error"]) | |
done = True | |
def process_message(self, message): | |
"""Process a message from the extension""" | |
logging.debug(message) | |
if self.recording: | |
try: | |
- if 'Provider' in message and 'Event' in message and \ | |
- 'ts' in message and 'pid' in message: | |
- if message['Provider'] == 'Microsoft-IE': | |
+ if ( | |
+ "Provider" in message | |
+ and "Event" in message | |
+ and "ts" in message | |
+ and "pid" in message | |
+ ): | |
+ if message["Provider"] == "Microsoft-IE": | |
if self.pid is None: | |
- self.pid = message['pid'] | |
- if message['pid'] == self.pid: | |
+ self.pid = message["pid"] | |
+ if message["pid"] == self.pid: | |
self.process_ie_message(message) | |
- elif message['Provider'] == 'Microsoft-Windows-WinINet' and \ | |
- message['pid'] == self.pid: | |
+ elif ( | |
+ message["Provider"] == "Microsoft-Windows-WinINet" | |
+ and message["pid"] == self.pid | |
+ ): | |
self.process_wininet_message(message) | |
- elif message['Provider'] == 'Microsoft-IEFRAME': | |
+ elif message["Provider"] == "Microsoft-IEFRAME": | |
if self.pid is None: | |
- self.pid = message['pid'] | |
- if message['pid'] == self.pid: | |
+ self.pid = message["pid"] | |
+ if message["pid"] == self.pid: | |
self.process_ieframe_message(message) | |
except Exception: | |
pass | |
def process_ie_message(self, message): | |
"""Handle IE trace events""" | |
- if message['Event'] == 'Mshtml_CWindow_SuperNavigate2/Start': | |
+ if message["Event"] == "Mshtml_CWindow_SuperNavigate2/Start": | |
self.navigating = True | |
self.page_loaded = None | |
- if self.navigating and message['Event'] == 'Mshtml_CDoc_Navigation' and 'data' in message: | |
- if 'URL' in message['data'] and \ | |
- message['data']['URL'].startswith('http') and \ | |
- message['data']['URL'].startswith('http') and \ | |
- not message['data']['URL'].startswith('http://127.0.0.1:8888'): | |
- if 'EventContextId' in message['data']: | |
- self.pageContexts.append(message['data']['EventContextId']) | |
- self.CMarkup.append(message['data']['CMarkup']) | |
+ if ( | |
+ self.navigating | |
+ and message["Event"] == "Mshtml_CDoc_Navigation" | |
+ and "data" in message | |
+ ): | |
+ if ( | |
+ "URL" in message["data"] | |
+ and message["data"]["URL"].startswith("http") | |
+ and message["data"]["URL"].startswith("http") | |
+ and not message["data"]["URL"].startswith("http://127.0.0.1:8888") | |
+ ): | |
+ if "EventContextId" in message["data"]: | |
+ self.pageContexts.append(message["data"]["EventContextId"]) | |
+ self.CMarkup.append(message["data"]["CMarkup"]) | |
self.navigating = False | |
self.last_activity = monotonic.monotonic() | |
- if 'start' not in self.page: | |
+ if "start" not in self.page: | |
logging.debug("Navigation started") | |
- self.page['start'] = message['ts'] | |
- if 'url' not in self.page: | |
- self.page['url'] = message['data']['URL'] | |
+ self.page["start"] = message["ts"] | |
+ if "url" not in self.page: | |
+ self.page["url"] = message["data"]["URL"] | |
# Page Navigation events | |
- if 'start' in self.page and 'data' in message: | |
- elapsed = message['ts'] - self.page['start'] | |
- if message['Event'] == 'Mshtml_NotifyGoesInteractive/Start' and \ | |
- 'injectScript' in self.job and \ | |
- 'Markup' in message['data'] and \ | |
- message['data']['Markup'] in self.CMarkup: | |
- logging.debug("Injecting script: \n%s", self.job['injectScript']) | |
- self.execute_js(self.job['injectScript']) | |
- if 'EventContextId' in message['data'] and \ | |
- message['data']['EventContextId'] in self.pageContexts: | |
- if message['Event'] == 'Mshtml_WebOCEvents_DocumentComplete': | |
- if 'CMarkup' in message['data'] and message['data']['CMarkup'] in self.CMarkup: | |
- if 'loadEventStart' not in self.page: | |
- self.page['loadEventStart'] = elapsed | |
+ if "start" in self.page and "data" in message: | |
+ elapsed = message["ts"] - self.page["start"] | |
+ if ( | |
+ message["Event"] == "Mshtml_NotifyGoesInteractive/Start" | |
+ and "injectScript" in self.job | |
+ and "Markup" in message["data"] | |
+ and message["data"]["Markup"] in self.CMarkup | |
+ ): | |
+ logging.debug("Injecting script: \n%s", self.job["injectScript"]) | |
+ self.execute_js(self.job["injectScript"]) | |
+ if ( | |
+ "EventContextId" in message["data"] | |
+ and message["data"]["EventContextId"] in self.pageContexts | |
+ ): | |
+ if message["Event"] == "Mshtml_WebOCEvents_DocumentComplete": | |
+ if ( | |
+ "CMarkup" in message["data"] | |
+ and message["data"]["CMarkup"] in self.CMarkup | |
+ ): | |
+ if "loadEventStart" not in self.page: | |
+ self.page["loadEventStart"] = elapsed | |
logging.debug("Page Loaded") | |
self.page_loaded = monotonic.monotonic() | |
- if message['Event'] == 'Mshtml_CMarkup_DOMContentLoadedEvent_Start/Start': | |
- self.page['domContentLoadedEventStart'] = elapsed | |
- elif message['Event'] == 'Mshtml_CMarkup_DOMContentLoadedEvent_Stop/Stop': | |
- self.page['domContentLoadedEventEnd'] = elapsed | |
- elif message['Event'] == 'Mshtml_CMarkup_LoadEvent_Start/Start': | |
- self.page['loadEventStart'] = elapsed | |
- elif message['Event'] == 'Mshtml_CMarkup_LoadEvent_Stop/Stop': | |
- self.page['loadEventEnd'] = elapsed | |
+ if ( | |
+ message["Event"] | |
+ == "Mshtml_CMarkup_DOMContentLoadedEvent_Start/Start" | |
+ ): | |
+ self.page["domContentLoadedEventStart"] = elapsed | |
+ elif ( | |
+ message["Event"] == "Mshtml_CMarkup_DOMContentLoadedEvent_Stop/Stop" | |
+ ): | |
+ self.page["domContentLoadedEventEnd"] = elapsed | |
+ elif message["Event"] == "Mshtml_CMarkup_LoadEvent_Start/Start": | |
+ self.page["loadEventStart"] = elapsed | |
+ elif message["Event"] == "Mshtml_CMarkup_LoadEvent_Stop/Stop": | |
+ self.page["loadEventEnd"] = elapsed | |
logging.debug("Page loadEventEnd") | |
self.page_loaded = monotonic.monotonic() | |
def process_ieframe_message(self, message): | |
"""Handle IEFRAME trace events""" | |
- if 'start' in self.page and not self.pageContexts: | |
- elapsed = message['ts'] - self.page['start'] | |
- if message['Event'] == 'Shdocvw_BaseBrowser_DocumentComplete': | |
- self.page['loadEventStart'] = elapsed | |
- self.page['loadEventEnd'] = elapsed | |
+ if "start" in self.page and not self.pageContexts: | |
+ elapsed = message["ts"] - self.page["start"] | |
+ if message["Event"] == "Shdocvw_BaseBrowser_DocumentComplete": | |
+ self.page["loadEventStart"] = elapsed | |
+ self.page["loadEventEnd"] = elapsed | |
self.page_loaded = monotonic.monotonic() | |
logging.debug("Page loaded (Document Complete)") | |
def process_wininet_message(self, message): | |
"""Handle WinInet trace events""" | |
- if 'Activity' in message: | |
+ if "Activity" in message: | |
self.last_activity = monotonic.monotonic() | |
self.process_dns_message(message) | |
self.process_socket_message(message) | |
self.process_request_message(message) | |
def process_dns_message(self, message): | |
"""Handle DNS events""" | |
- event_id = message['Activity'] | |
- if message['Event'] == 'WININET_DNS_QUERY/Start' and event_id not in self.dns: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- if 'data' in message and 'HostName' in message['data']: | |
- self.dns[event_id] = {'host': message['data']['HostName']} | |
- if message['Event'] == 'WININET_DNS_QUERY/Stop' and event_id in self.dns: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- if 'data' in message and 'AddressList' in message['data']: | |
- self.dns[event_id]['addresses'] = list( | |
- filter(None, message['data']['AddressList'].split(';'))) | |
- if message['Event'] == 'Wininet_Getaddrinfo/Start' and event_id in self.dns: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.dns[event_id]['start'] = message['ts'] - self.page['start'] | |
- if message['Event'] == 'Wininet_Getaddrinfo/Stop' and event_id in self.dns: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.dns[event_id]['end'] = message['ts'] - self.page['start'] | |
+ event_id = message["Activity"] | |
+ if message["Event"] == "WININET_DNS_QUERY/Start" and event_id not in self.dns: | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ if "data" in message and "HostName" in message["data"]: | |
+ self.dns[event_id] = {"host": message["data"]["HostName"]} | |
+ if message["Event"] == "WININET_DNS_QUERY/Stop" and event_id in self.dns: | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ if "data" in message and "AddressList" in message["data"]: | |
+ self.dns[event_id]["addresses"] = list( | |
+ filter(None, message["data"]["AddressList"].split(";")) | |
+ ) | |
+ if message["Event"] == "Wininet_Getaddrinfo/Start" and event_id in self.dns: | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.dns[event_id]["start"] = message["ts"] - self.page["start"] | |
+ if message["Event"] == "Wininet_Getaddrinfo/Stop" and event_id in self.dns: | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.dns[event_id]["end"] = message["ts"] - self.page["start"] | |
def process_socket_message(self, message): | |
"""Handle socket connect events""" | |
- event_id = message['Activity'] | |
- if message['Event'] == 'Wininet_SocketConnect/Start' and event_id not in self.sockets: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.sockets[event_id] = {'start': message['ts'] - self.page['start'], | |
- 'index': len(self.sockets)} | |
- if 'data' in message and 'Socket' in message['data']: | |
- self.sockets[event_id]['socket'] = message['data']['Socket'] | |
- if 'data' in message and 'SourcePort' in message['data']: | |
+ event_id = message["Activity"] | |
+ if ( | |
+ message["Event"] == "Wininet_SocketConnect/Start" | |
+ and event_id not in self.sockets | |
+ ): | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.sockets[event_id] = { | |
+ "start": message["ts"] - self.page["start"], | |
+ "index": len(self.sockets), | |
+ } | |
+ if "data" in message and "Socket" in message["data"]: | |
+ self.sockets[event_id]["socket"] = message["data"]["Socket"] | |
+ if "data" in message and "SourcePort" in message["data"]: | |
# keep a mapping from the source port to the connection activity id | |
- self.socket_ports[message['data']['SourcePort']] = event_id | |
- self.sockets[event_id]['srcPort'] = message['data']['SourcePort'] | |
- if 'data' in message and 'RemoteAddressIndex' in message['data']: | |
- self.sockets[event_id]['addrIndex'] = message['data']['RemoteAddressIndex'] | |
- if message['Event'] == 'Wininet_SocketConnect/Stop' and event_id in self.sockets: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.sockets[event_id]['end'] = message['ts'] - self.page['start'] | |
- if message['Event'] == 'WININET_TCP_CONNECTION/Start' and event_id in self.sockets: | |
- if 'ServerName' in message['data']: | |
- self.sockets[event_id]['host'] = message['data']['ServerName'] | |
- if message['Event'] == 'WININET_TCP_CONNECTION/Stop' and event_id in self.sockets: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- if 'end' not in self.sockets[event_id]: | |
- self.sockets[event_id]['end'] = message['ts'] - self.page['start'] | |
- if 'srcPort' in self.sockets[event_id] and \ | |
- self.sockets[event_id]['srcPort'] in self.socket_ports: | |
- del self.socket_ports[self.sockets[event_id]['srcPort']] | |
- if message['Event'] == 'WININET_TCP_CONNECTION/Fail' and event_id in self.sockets: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- if 'end' not in self.sockets[event_id]: | |
- self.sockets[event_id]['end'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'Error' in message['data']: | |
- self.sockets[event_id]['error'] = message['data']['Error'] | |
- if message['Event'] == 'Wininet_Connect/Stop': | |
- if 'data' in message and 'Socket' in message['data'] and \ | |
- message['data']['Socket'] in self.socket_ports: | |
- connect_id = self.socket_ports[message['data']['Socket']] | |
+ self.socket_ports[message["data"]["SourcePort"]] = event_id | |
+ self.sockets[event_id]["srcPort"] = message["data"]["SourcePort"] | |
+ if "data" in message and "RemoteAddressIndex" in message["data"]: | |
+ self.sockets[event_id]["addrIndex"] = message["data"][ | |
+ "RemoteAddressIndex" | |
+ ] | |
+ if ( | |
+ message["Event"] == "Wininet_SocketConnect/Stop" | |
+ and event_id in self.sockets | |
+ ): | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.sockets[event_id]["end"] = message["ts"] - self.page["start"] | |
+ if ( | |
+ message["Event"] == "WININET_TCP_CONNECTION/Start" | |
+ and event_id in self.sockets | |
+ ): | |
+ if "ServerName" in message["data"]: | |
+ self.sockets[event_id]["host"] = message["data"]["ServerName"] | |
+ if ( | |
+ message["Event"] == "WININET_TCP_CONNECTION/Stop" | |
+ and event_id in self.sockets | |
+ ): | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ if "end" not in self.sockets[event_id]: | |
+ self.sockets[event_id]["end"] = message["ts"] - self.page["start"] | |
+ if ( | |
+ "srcPort" in self.sockets[event_id] | |
+ and self.sockets[event_id]["srcPort"] in self.socket_ports | |
+ ): | |
+ del self.socket_ports[self.sockets[event_id]["srcPort"]] | |
+ if ( | |
+ message["Event"] == "WININET_TCP_CONNECTION/Fail" | |
+ and event_id in self.sockets | |
+ ): | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ if "end" not in self.sockets[event_id]: | |
+ self.sockets[event_id]["end"] = message["ts"] - self.page["start"] | |
+ if "data" in message and "Error" in message["data"]: | |
+ self.sockets[event_id]["error"] = message["data"]["Error"] | |
+ if message["Event"] == "Wininet_Connect/Stop": | |
+ if ( | |
+ "data" in message | |
+ and "Socket" in message["data"] | |
+ and message["data"]["Socket"] in self.socket_ports | |
+ ): | |
+ connect_id = self.socket_ports[message["data"]["Socket"]] | |
if connect_id in self.sockets: | |
- if 'LocalAddress' in message['data']: | |
- self.sockets[connect_id]['local'] = message['data']['LocalAddress'] | |
- if 'RemoteAddress' in message['data']: | |
- self.sockets[connect_id]['remote'] = message['data']['RemoteAddress'] | |
+ if "LocalAddress" in message["data"]: | |
+ self.sockets[connect_id]["local"] = message["data"][ | |
+ "LocalAddress" | |
+ ] | |
+ if "RemoteAddress" in message["data"]: | |
+ self.sockets[connect_id]["remote"] = message["data"][ | |
+ "RemoteAddress" | |
+ ] | |
# TLS | |
- if message['Event'] == 'WININET_HTTPS_NEGOTIATION/Start' and event_id in self.sockets: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.sockets[event_id]['tlsStart'] = message['ts'] - self.page['start'] | |
- if message['Event'] == 'WININET_HTTPS_NEGOTIATION/Stop' and event_id in self.sockets: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.sockets[event_id]['tlsEnd'] = message['ts'] - self.page['start'] | |
+ if ( | |
+ message["Event"] == "WININET_HTTPS_NEGOTIATION/Start" | |
+ and event_id in self.sockets | |
+ ): | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.sockets[event_id]["tlsStart"] = message["ts"] - self.page["start"] | |
+ if ( | |
+ message["Event"] == "WININET_HTTPS_NEGOTIATION/Stop" | |
+ and event_id in self.sockets | |
+ ): | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.sockets[event_id]["tlsEnd"] = message["ts"] - self.page["start"] | |
def process_request_message(self, message): | |
"""Handle request-level messages""" | |
- event_id = message['Activity'] | |
+ event_id = message["Activity"] | |
# Request created (not necessarily sent) | |
- if message['Event'] == 'Wininet_SendRequest/Start': | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
+ if message["Event"] == "Wininet_SendRequest/Start": | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
if event_id not in self.requests: | |
- self.requests[event_id] = {'activity': event_id, 'id': len(self.requests) + 1} | |
- if 'created' not in self.requests[event_id]: | |
- self.requests[event_id]['created'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'AddressName' in message['data'] and \ | |
- 'url' not in self.requests[event_id]: | |
- self.requests[event_id]['url'] = message['data']['AddressName'] | |
+ self.requests[event_id] = { | |
+ "activity": event_id, | |
+ "id": len(self.requests) + 1, | |
+ } | |
+ if "created" not in self.requests[event_id]: | |
+ self.requests[event_id]["created"] = message["ts"] - self.page["start"] | |
+ if ( | |
+ "data" in message | |
+ and "AddressName" in message["data"] | |
+ and "url" not in self.requests[event_id] | |
+ ): | |
+ self.requests[event_id]["url"] = message["data"]["AddressName"] | |
# Headers and size of outbound request - Length, Headers | |
- if message['Event'] == 'WININET_REQUEST_HEADER': | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
+ if message["Event"] == "WININET_REQUEST_HEADER": | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
if event_id not in self.requests: | |
- self.requests[event_id] = {'activity': event_id, 'id': len(self.requests) + 1} | |
- if 'created' not in self.requests[event_id]: | |
- self.requests[event_id]['created'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'Headers' in message['data']: | |
- self.requests[event_id]['outHeaders'] = message['data']['Headers'] | |
- self.requests[event_id]['outBytes'] = len(self.requests[event_id]['outHeaders']) | |
- if 'start' not in self.requests[event_id]: | |
- self.requests[event_id]['start'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'Length' in message['data'] and \ | |
- 'outBytes' not in self.requests[event_id]: | |
- length = int(message['data']['Length']) | |
+ self.requests[event_id] = { | |
+ "activity": event_id, | |
+ "id": len(self.requests) + 1, | |
+ } | |
+ if "created" not in self.requests[event_id]: | |
+ self.requests[event_id]["created"] = message["ts"] - self.page["start"] | |
+ if "data" in message and "Headers" in message["data"]: | |
+ self.requests[event_id]["outHeaders"] = message["data"]["Headers"] | |
+ self.requests[event_id]["outBytes"] = len( | |
+ self.requests[event_id]["outHeaders"] | |
+ ) | |
+ if "start" not in self.requests[event_id]: | |
+ self.requests[event_id]["start"] = message["ts"] - self.page["start"] | |
+ if ( | |
+ "data" in message | |
+ and "Length" in message["data"] | |
+ and "outBytes" not in self.requests[event_id] | |
+ ): | |
+ length = int(message["data"]["Length"]) | |
if length > 0: | |
- self.requests[event_id]['outBytes'] = length | |
+ self.requests[event_id]["outBytes"] = length | |
# size of outbound request (and actual start) - Size | |
- if message['Event'] == 'Wininet_SendRequest_Main': | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
+ if message["Event"] == "Wininet_SendRequest_Main": | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
if event_id not in self.requests: | |
- self.requests[event_id] = {'activity': event_id, 'id': len(self.requests) + 1} | |
- if 'created' not in self.requests[event_id]: | |
- self.requests[event_id]['created'] = message['ts'] - self.page['start'] | |
- self.requests[event_id]['start'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'Size' in message['data']: | |
- length = int(message['data']['Size']) | |
+ self.requests[event_id] = { | |
+ "activity": event_id, | |
+ "id": len(self.requests) + 1, | |
+ } | |
+ if "created" not in self.requests[event_id]: | |
+ self.requests[event_id]["created"] = message["ts"] - self.page["start"] | |
+ self.requests[event_id]["start"] = message["ts"] - self.page["start"] | |
+ if "data" in message and "Size" in message["data"]: | |
+ length = int(message["data"]["Size"]) | |
if length > 0: | |
- self.requests[event_id]['outBytes'] = int(message['data']['Size']) | |
+ self.requests[event_id]["outBytes"] = int(message["data"]["Size"]) | |
# Maps request to source port of connection "Socket" == local port | |
- if message['Event'] == 'Wininet_LookupConnection/Stop': | |
- if 'data' in message and 'Socket' in message['data'] and \ | |
- message['data']['Socket'] in self.socket_ports: | |
+ if message["Event"] == "Wininet_LookupConnection/Stop": | |
+ if ( | |
+ "data" in message | |
+ and "Socket" in message["data"] | |
+ and message["data"]["Socket"] in self.socket_ports | |
+ ): | |
if event_id not in self.requests: | |
- self.requests[event_id] = {'activity': event_id, 'id': len(self.requests) + 1} | |
- connect_id = self.socket_ports[message['data']['Socket']] | |
- self.requests[event_id]['connection'] = connect_id | |
+ self.requests[event_id] = { | |
+ "activity": event_id, | |
+ "id": len(self.requests) + 1, | |
+ } | |
+ connect_id = self.socket_ports[message["data"]["Socket"]] | |
+ self.requests[event_id]["connection"] = connect_id | |
if connect_id not in self.sockets: | |
- self.sockets[connect_id] = {'index': len(self.sockets)} | |
- if 'requests' not in self.sockets[connect_id]: | |
- self.sockets[connect_id]['requests'] = [] | |
- self.sockets[connect_id]['requests'].append(event_id) | |
+ self.sockets[connect_id] = {"index": len(self.sockets)} | |
+ if "requests" not in self.sockets[connect_id]: | |
+ self.sockets[connect_id]["requests"] = [] | |
+ self.sockets[connect_id]["requests"].append(event_id) | |
# Headers and size of headers - Length, Headers | |
- if message['Event'] == 'WININET_RESPONSE_HEADER' and event_id in self.requests: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.requests[event_id]['end'] = message['ts'] - self.page['start'] | |
- if 'firstByte' not in self.requests[event_id]: | |
- self.requests[event_id]['firstByte'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'Headers' in message['data']: | |
- self.requests[event_id]['inHeaders'] = message['data']['Headers'] | |
- if 'data' in message and 'Length' in message['data']: | |
- self.requests[event_id]['inHeadersLen'] = int(message['data']['Length']) | |
+ if message["Event"] == "WININET_RESPONSE_HEADER" and event_id in self.requests: | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.requests[event_id]["end"] = message["ts"] - self.page["start"] | |
+ if "firstByte" not in self.requests[event_id]: | |
+ self.requests[event_id]["firstByte"] = ( | |
+ message["ts"] - self.page["start"] | |
+ ) | |
+ if "data" in message and "Headers" in message["data"]: | |
+ self.requests[event_id]["inHeaders"] = message["data"]["Headers"] | |
+ if "data" in message and "Length" in message["data"]: | |
+ self.requests[event_id]["inHeadersLen"] = int(message["data"]["Length"]) | |
# inbound bytes (ttfb, keep incrementing end) - Size | |
- if message['Event'] == 'Wininet_ReadData' and event_id in self.requests: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- if 'start' in self.requests[event_id]: | |
- self.requests[event_id]['end'] = message['ts'] - self.page['start'] | |
- if 'firstByte' not in self.requests[event_id]: | |
- self.requests[event_id]['firstByte'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'Size' in message['data']: | |
- bytesIn = int(message['data']['Size']) | |
- if 'inBytes' not in self.requests[event_id]: | |
- self.requests[event_id]['inBytes'] = 0 | |
- self.requests[event_id]['inBytes'] += bytesIn | |
- if 'chunks' not in self.requests[event_id]: | |
- self.requests[event_id]['chunks'] = [] | |
- ts = message['ts'] - self.page['start'] | |
- self.requests[event_id]['chunks'].append( {'ts': ts, 'bytes': bytesIn}) | |
- if message['Event'] == 'WININET_STREAM_DATA_INDICATED' and event_id in self.requests: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- self.requests[event_id]['protocol'] = 'HTTP/2' | |
- if 'start' in self.requests[event_id]: | |
- self.requests[event_id]['end'] = message['ts'] - self.page['start'] | |
- if 'firstByte' not in self.requests[event_id]: | |
- self.requests[event_id]['firstByte'] = message['ts'] - self.page['start'] | |
- if 'data' in message and 'Size' in message['data']: | |
- bytesIn = int(message['data']['Size']) | |
- if 'inBytes' not in self.requests[event_id]: | |
- self.requests[event_id]['inBytes'] = 0 | |
- self.requests[event_id]['inBytes'] += bytesIn | |
- if 'chunks' not in self.requests[event_id]: | |
- self.requests[event_id]['chunks'] = [] | |
- ts = message['ts'] - self.page['start'] | |
- self.requests[event_id]['chunks'].append( {'ts': ts, 'bytes': bytesIn}) | |
+ if message["Event"] == "Wininet_ReadData" and event_id in self.requests: | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ if "start" in self.requests[event_id]: | |
+ self.requests[event_id]["end"] = message["ts"] - self.page["start"] | |
+ if "firstByte" not in self.requests[event_id]: | |
+ self.requests[event_id]["firstByte"] = ( | |
+ message["ts"] - self.page["start"] | |
+ ) | |
+ if "data" in message and "Size" in message["data"]: | |
+ bytesIn = int(message["data"]["Size"]) | |
+ if "inBytes" not in self.requests[event_id]: | |
+ self.requests[event_id]["inBytes"] = 0 | |
+ self.requests[event_id]["inBytes"] += bytesIn | |
+ if "chunks" not in self.requests[event_id]: | |
+ self.requests[event_id]["chunks"] = [] | |
+ ts = message["ts"] - self.page["start"] | |
+ self.requests[event_id]["chunks"].append( | |
+ {"ts": ts, "bytes": bytesIn} | |
+ ) | |
+ if ( | |
+ message["Event"] == "WININET_STREAM_DATA_INDICATED" | |
+ and event_id in self.requests | |
+ ): | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ self.requests[event_id]["protocol"] = "HTTP/2" | |
+ if "start" in self.requests[event_id]: | |
+ self.requests[event_id]["end"] = message["ts"] - self.page["start"] | |
+ if "firstByte" not in self.requests[event_id]: | |
+ self.requests[event_id]["firstByte"] = ( | |
+ message["ts"] - self.page["start"] | |
+ ) | |
+ if "data" in message and "Size" in message["data"]: | |
+ bytesIn = int(message["data"]["Size"]) | |
+ if "inBytes" not in self.requests[event_id]: | |
+ self.requests[event_id]["inBytes"] = 0 | |
+ self.requests[event_id]["inBytes"] += bytesIn | |
+ if "chunks" not in self.requests[event_id]: | |
+ self.requests[event_id]["chunks"] = [] | |
+ ts = message["ts"] - self.page["start"] | |
+ self.requests[event_id]["chunks"].append( | |
+ {"ts": ts, "bytes": bytesIn} | |
+ ) | |
# completely finished | |
- if message['Event'] == 'Wininet_UsageLogRequest' and \ | |
- event_id in self.requests and 'data' in message: | |
- if 'URL' in message['data']: | |
- self.requests[event_id]['url'] = message['data']['URL'] | |
- if 'Verb' in message['data']: | |
- self.requests[event_id]['verb'] = message['data']['Verb'] | |
- if 'Status' in message['data']: | |
- self.requests[event_id]['status'] = message['data']['Status'] | |
- if 'RequestHeaders' in message['data']: | |
- self.requests[event_id]['outHeaders'] = message['data']['RequestHeaders'] | |
- if 'ResponseHeaders' in message['data']: | |
- self.requests[event_id]['inHeaders'] = message['data']['ResponseHeaders'] | |
+ if ( | |
+ message["Event"] == "Wininet_UsageLogRequest" | |
+ and event_id in self.requests | |
+ and "data" in message | |
+ ): | |
+ if "URL" in message["data"]: | |
+ self.requests[event_id]["url"] = message["data"]["URL"] | |
+ if "Verb" in message["data"]: | |
+ self.requests[event_id]["verb"] = message["data"]["Verb"] | |
+ if "Status" in message["data"]: | |
+ self.requests[event_id]["status"] = message["data"]["Status"] | |
+ if "RequestHeaders" in message["data"]: | |
+ self.requests[event_id]["outHeaders"] = message["data"][ | |
+ "RequestHeaders" | |
+ ] | |
+ if "ResponseHeaders" in message["data"]: | |
+ self.requests[event_id]["inHeaders"] = message["data"][ | |
+ "ResponseHeaders" | |
+ ] | |
# Headers done - Direction changing for capture (no params) | |
- if message['Event'] == 'Wininet_SendRequest/Stop' and event_id in self.requests: | |
- if 'start' not in self.page: | |
- self.page['start'] = message['ts'] | |
- if 'end' not in self.requests[event_id]: | |
- self.requests[event_id]['end'] = message['ts'] - self.page['start'] | |
+ if message["Event"] == "Wininet_SendRequest/Stop" and event_id in self.requests: | |
+ if "start" not in self.page: | |
+ self.page["start"] = message["ts"] | |
+ if "end" not in self.requests[event_id]: | |
+ self.requests[event_id]["end"] = message["ts"] - self.page["start"] | |
def execute_js(self, script): | |
"""Run javascipt""" | |
ret = None | |
if self.driver is not None: | |
@@ -654,136 +825,154 @@ | |
"""Execute one of our js scripts""" | |
ret = None | |
script = None | |
script_file_path = os.path.join(self.script_dir, file_name) | |
if os.path.isfile(script_file_path): | |
- with open(script_file_path, 'rb') as script_file: | |
+ with open(script_file_path, "rb") as script_file: | |
script = script_file.read() | |
if script is not None: | |
try: | |
self.driver.set_script_timeout(30) | |
- ret = self.driver.execute_script('return ' + script) | |
+ ret = self.driver.execute_script("return " + script) | |
except Exception: | |
pass | |
if ret is not None: | |
logging.debug(ret) | |
return ret | |
def collect_browser_metrics(self, task): | |
"""Collect all of the in-page browser metrics that we need""" | |
# Trigger a message to start writing the interactive periods asynchronously | |
if self.supports_interactive: | |
- self.execute_js('window.postMessage({ wptagent: "GetInteractivePeriods"}, "*");') | |
+ self.execute_js( | |
+ 'window.postMessage({ wptagent: "GetInteractivePeriods"}, "*");' | |
+ ) | |
# Collect teh regular browser metrics | |
logging.debug("Collecting user timing metrics") | |
- user_timing = self.run_js_file('user_timing.js') | |
+ user_timing = self.run_js_file("user_timing.js") | |
if user_timing is not None: | |
- path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join(task["dir"], task["prefix"] + "_timed_events.json.gz") | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(user_timing)) | |
logging.debug("Collecting page-level metrics") | |
- page_data = self.run_js_file('page_data.js') | |
+ page_data = self.run_js_file("page_data.js") | |
if page_data is not None: | |
- task['page_data'].update(page_data) | |
- if 'customMetrics' in self.job: | |
+ task["page_data"].update(page_data) | |
+ if "customMetrics" in self.job: | |
self.driver.set_script_timeout(30) | |
custom_metrics = {} | |
- for name in self.job['customMetrics']: | |
+ for name in self.job["customMetrics"]: | |
logging.debug("Collecting custom metric %s", name) | |
- script = 'var wptCustomMetric = function() {' +\ | |
- self.job['customMetrics'][name] +\ | |
- '};try{return wptCustomMetric();}catch(e){};' | |
+ script = ( | |
+ "var wptCustomMetric = function() {" | |
+ + self.job["customMetrics"][name] | |
+ + "};try{return wptCustomMetric();}catch(e){};" | |
+ ) | |
try: | |
custom_metrics[name] = self.driver.execute_script(script) | |
if custom_metrics[name] is not None: | |
logging.debug(custom_metrics[name]) | |
except Exception: | |
pass | |
- path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join(task["dir"], task["prefix"] + "_metrics.json.gz") | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(custom_metrics)) | |
# Wait for the interactive periods to be written | |
if self.supports_interactive: | |
end_time = monotonic.monotonic() + 10 | |
interactive = None | |
while interactive is None and monotonic.monotonic() < end_time: | |
interactive = self.execute_js( | |
- 'return document.getElementById("wptagentLongTasks").innerText;') | |
+ 'return document.getElementById("wptagentLongTasks").innerText;' | |
+ ) | |
if interactive is None: | |
time.sleep(0.2) | |
if interactive is not None and len(interactive): | |
- interactive_file = os.path.join(task['dir'], | |
- task['prefix'] + '_interactive.json.gz') | |
- with gzip.open(interactive_file, 'wb', 7) as f_out: | |
+ interactive_file = os.path.join( | |
+ task["dir"], task["prefix"] + "_interactive.json.gz" | |
+ ) | |
+ with gzip.open(interactive_file, "wb", 7) as f_out: | |
f_out.write(interactive) | |
def prepare_task(self, task): | |
"""Format the file prefixes for multi-step testing""" | |
- if task['current_step'] == 1: | |
- task['prefix'] = task['task_prefix'] | |
- task['video_subdirectory'] = task['task_video_prefix'] | |
+ if task["current_step"] == 1: | |
+ task["prefix"] = task["task_prefix"] | |
+ task["video_subdirectory"] = task["task_video_prefix"] | |
else: | |
- task['prefix'] = '{0}_{1:d}'.format(task['task_prefix'], task['current_step']) | |
- task['video_subdirectory'] = '{0}_{1:d}'.format(task['task_video_prefix'], | |
- task['current_step']) | |
- if task['video_subdirectory'] not in task['video_directories']: | |
- task['video_directories'].append(task['video_subdirectory']) | |
+ task["prefix"] = "{0}_{1:d}".format( | |
+ task["task_prefix"], task["current_step"] | |
+ ) | |
+ task["video_subdirectory"] = "{0}_{1:d}".format( | |
+ task["task_video_prefix"], task["current_step"] | |
+ ) | |
+ if task["video_subdirectory"] not in task["video_directories"]: | |
+ task["video_directories"].append(task["video_subdirectory"]) | |
if self.event_name is not None: | |
- task['step_name'] = self.event_name | |
+ task["step_name"] = self.event_name | |
else: | |
- task['step_name'] = 'Step_{0:d}'.format(task['current_step']) | |
+ task["step_name"] = "Step_{0:d}".format(task["current_step"]) | |
def on_start_recording(self, task): | |
"""Notification that we are about to start an operation that needs to be recorded""" | |
# Clear the state | |
self.page = {} | |
self.requests = {} | |
self.reset() | |
- task['page_data'] = {'date': time.time()} | |
- task['page_result'] = None | |
- task['run_start_time'] = monotonic.monotonic() | |
- if self.job['message_server'] is not None: | |
- self.job['message_server'].flush_messages() | |
- if self.browser_version is not None and 'browserVersion' not in task['page_data']: | |
- task['page_data']['browserVersion'] = self.browser_version | |
- task['page_data']['browser_version'] = self.browser_version | |
+ task["page_data"] = {"date": time.time()} | |
+ task["page_result"] = None | |
+ task["run_start_time"] = monotonic.monotonic() | |
+ if self.job["message_server"] is not None: | |
+ self.job["message_server"].flush_messages() | |
+ if ( | |
+ self.browser_version is not None | |
+ and "browserVersion" not in task["page_data"] | |
+ ): | |
+ task["page_data"]["browserVersion"] = self.browser_version | |
+ task["page_data"]["browser_version"] = self.browser_version | |
self.recording = True | |
self.navigating = True | |
now = monotonic.monotonic() | |
if self.page_loaded is not None: | |
self.page_loaded = now | |
DesktopBrowser.on_start_recording(self, task) | |
- logging.debug('Starting measurement') | |
- task['start_time'] = datetime.utcnow() | |
+ logging.debug("Starting measurement") | |
+ task["start_time"] = datetime.utcnow() | |
def on_stop_capture(self, task): | |
"""Do any quick work to stop things that are capturing data""" | |
DesktopBrowser.on_stop_capture(self, task) | |
- if 'heroElementTimes' in self.job and self.job['heroElementTimes']: | |
+ if "heroElementTimes" in self.job and self.job["heroElementTimes"]: | |
hero_elements = None | |
custom_hero_selectors = {} | |
- if 'heroElements' in self.job: | |
- custom_hero_selectors = self.job['heroElements'] | |
- logging.debug('Collecting hero element positions') | |
- with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: | |
+ if "heroElements" in self.job: | |
+ custom_hero_selectors = self.job["heroElements"] | |
+ logging.debug("Collecting hero element positions") | |
+ with open( | |
+ os.path.join(self.script_dir, "hero_elements.js"), "rb" | |
+ ) as script_file: | |
hero_elements_script = script_file.read() | |
- script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' | |
+ script = ( | |
+ hero_elements_script + "(" + json.dumps(custom_hero_selectors) + ")" | |
+ ) | |
hero_elements = self.execute_js(script) | |
if hero_elements is not None: | |
- path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join( | |
+ task["dir"], task["prefix"] + "_hero_elements.json.gz" | |
+ ) | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(hero_elements)) | |
def on_stop_recording(self, task): | |
"""Notification that we are done with recording""" | |
self.recording = False | |
DesktopBrowser.on_stop_recording(self, task) | |
- if self.job['pngScreenShot']: | |
- screen_shot = os.path.join(task['dir'], task['prefix'] + '_screen.png') | |
+ if self.job["pngScreenShot"]: | |
+ screen_shot = os.path.join(task["dir"], task["prefix"] + "_screen.png") | |
self.grab_screenshot(screen_shot, png=True) | |
else: | |
- screen_shot = os.path.join(task['dir'], task['prefix'] + '_screen.jpg') | |
+ screen_shot = os.path.join(task["dir"], task["prefix"] + "_screen.jpg") | |
self.grab_screenshot(screen_shot, png=False, resize=600) | |
# Collect end of test data from the browser | |
self.collect_browser_metrics(task) | |
def on_start_processing(self, task): | |
@@ -797,81 +986,91 @@ | |
def process_command(self, command): | |
"""Process an individual script command""" | |
logging.debug("Processing script command:") | |
logging.debug(command) | |
- if command['command'] == 'navigate': | |
- self.task['page_data']['URL'] = command['target'] | |
- self.task['url'] = command['target'] | |
- url = str(command['target']).replace('"', '\"') | |
+ if command["command"] == "navigate": | |
+ self.task["page_data"]["URL"] = command["target"] | |
+ self.task["url"] = command["target"] | |
+ url = str(command["target"]).replace('"', '"') | |
script = 'window.location="{0}";'.format(url) | |
script = self.prepare_script_for_record(script) | |
try: | |
self.driver.set_script_timeout(30) | |
self.driver.execute_script(script) | |
except Exception: | |
pass | |
self.page_loaded = None | |
- elif command['command'] == 'logdata': | |
- self.task['combine_steps'] = False | |
- if int(re.search(r'\d+', str(command['target'])).group()): | |
+ elif command["command"] == "logdata": | |
+ self.task["combine_steps"] = False | |
+ if int(re.search(r"\d+", str(command["target"])).group()): | |
logging.debug("Data logging enabled") | |
- self.task['log_data'] = True | |
+ self.task["log_data"] = True | |
else: | |
logging.debug("Data logging disabled") | |
- self.task['log_data'] = False | |
- elif command['command'] == 'combinesteps': | |
- self.task['log_data'] = True | |
- self.task['combine_steps'] = True | |
- elif command['command'] == 'seteventname': | |
- self.event_name = command['target'] | |
- elif command['command'] == 'exec': | |
- script = command['target'] | |
- if command['record']: | |
+ self.task["log_data"] = False | |
+ elif command["command"] == "combinesteps": | |
+ self.task["log_data"] = True | |
+ self.task["combine_steps"] = True | |
+ elif command["command"] == "seteventname": | |
+ self.event_name = command["target"] | |
+ elif command["command"] == "exec": | |
+ script = command["target"] | |
+ if command["record"]: | |
script = self.prepare_script_for_record(script) | |
try: | |
self.driver.set_script_timeout(30) | |
self.driver.execute_script(script) | |
except Exception: | |
pass | |
- elif command['command'] == 'sleep': | |
- delay = min(60, max(0, int(re.search(r'\d+', str(command['target'])).group()))) | |
+ elif command["command"] == "sleep": | |
+ delay = min( | |
+ 60, max(0, int(re.search(r"\d+", str(command["target"])).group())) | |
+ ) | |
if delay > 0: | |
time.sleep(delay) | |
- elif command['command'] == 'setabm': | |
- self.task['stop_at_onload'] = \ | |
- bool('target' in command and int(re.search(r'\d+', | |
- str(command['target'])).group()) == 0) | |
- elif command['command'] == 'setactivitytimeout': | |
- if 'target' in command: | |
- milliseconds = int(re.search(r'\d+', str(command['target'])).group()) | |
- self.task['activity_time'] = max(0, min(30, float(milliseconds) / 1000.0)) | |
- elif command['command'] == 'setuseragent': | |
- self.task['user_agent_string'] = command['target'] | |
- elif command['command'] == 'setcookie': | |
- if 'target' in command and 'value' in command: | |
- url = command['target'].strip() | |
- cookie = command['value'] | |
- pos = cookie.find(';') | |
+ elif command["command"] == "setabm": | |
+ self.task["stop_at_onload"] = bool( | |
+ "target" in command | |
+ and int(re.search(r"\d+", str(command["target"])).group()) == 0 | |
+ ) | |
+ elif command["command"] == "setactivitytimeout": | |
+ if "target" in command: | |
+ milliseconds = int(re.search(r"\d+", str(command["target"])).group()) | |
+ self.task["activity_time"] = max( | |
+ 0, min(30, float(milliseconds) / 1000.0) | |
+ ) | |
+ elif command["command"] == "setuseragent": | |
+ self.task["user_agent_string"] = command["target"] | |
+ elif command["command"] == "setcookie": | |
+ if "target" in command and "value" in command: | |
+ url = command["target"].strip() | |
+ cookie = command["value"] | |
+ pos = cookie.find(";") | |
if pos > 0: | |
cookie = cookie[:pos] | |
- pos = cookie.find('=') | |
+ pos = cookie.find("=") | |
if pos > 0: | |
name = cookie[:pos].strip() | |
- value = cookie[pos+1:].strip() | |
+ value = cookie[pos + 1 :].strip() | |
if len(name) and len(value) and len(url): | |
try: | |
- self.driver.add_cookie({'url': url, 'name': name, 'value': value}) | |
+ self.driver.add_cookie( | |
+ {"url": url, "name": name, "value": value} | |
+ ) | |
except Exception: | |
pass | |
try: | |
import win32inet | |
+ | |
cookie_string = cookie | |
- if cookie.find('xpires') == -1: | |
+ if cookie.find("xpires") == -1: | |
expires = datetime.utcnow() + timedelta(days=30) | |
- expires_string = expires.strftime("%a, %d %b %Y %H:%M:%S GMT") | |
- cookie_string += '; expires={0}'.format(expires_string) | |
+ expires_string = expires.strftime( | |
+ "%a, %d %b %Y %H:%M:%S GMT" | |
+ ) | |
+ cookie_string += "; expires={0}".format(expires_string) | |
logging.debug("Setting cookie: %s", cookie_string) | |
win32inet.InternetSetCookie(url, None, cookie_string) | |
except Exception as err: | |
logging.exception("Error setting cookie: %s", str(err)) | |
@@ -887,516 +1086,601 @@ | |
"""Save the screen shot (png or jpeg)""" | |
if self.driver is not None: | |
try: | |
data = self.driver.get_screenshot_as_png() | |
if data is not None: | |
- resize_string = '' if not resize else '-resize {0:d}x{0:d} '.format(resize) | |
+ resize_string = ( | |
+ "" if not resize else "-resize {0:d}x{0:d} ".format(resize) | |
+ ) | |
if png: | |
- with open(path, 'wb') as image_file: | |
+ with open(path, "wb") as image_file: | |
image_file.write(data) | |
if len(resize_string): | |
- cmd = '{0} -format png -define png:color-type=2 '\ | |
- '-depth 8 {1}"{2}"'.format(self.job['image_magick']['mogrify'], | |
- resize_string, path) | |
+ cmd = ( | |
+ "{0} -format png -define png:color-type=2 " | |
+ '-depth 8 {1}"{2}"'.format( | |
+ self.job["image_magick"]["mogrify"], | |
+ resize_string, | |
+ path, | |
+ ) | |
+ ) | |
logging.debug(cmd) | |
subprocess.call(cmd, shell=True) | |
else: | |
- tmp_file = path + '.png' | |
- with open(tmp_file, 'wb') as image_file: | |
+ tmp_file = path + ".png" | |
+ with open(tmp_file, "wb") as image_file: | |
image_file.write(data) | |
command = '{0} "{1}" {2}-quality {3:d} "{4}"'.format( | |
- self.job['image_magick']['convert'], | |
- tmp_file, resize_string, self.job['imageQuality'], path) | |
+ self.job["image_magick"]["convert"], | |
+ tmp_file, | |
+ resize_string, | |
+ self.job["imageQuality"], | |
+ path, | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
if os.path.isfile(tmp_file): | |
try: | |
os.remove(tmp_file) | |
except Exception: | |
pass | |
except Exception as err: | |
- logging.debug('Exception grabbing screen shot: %s', str(err)) | |
+ logging.debug("Exception grabbing screen shot: %s", str(err)) | |
def process_requests(self, task): | |
"""Convert all of the request and page events into the format needed for WPT""" | |
result = {} | |
self.process_sockets() | |
- result['requests'] = self.process_raw_requests() | |
- result['pageData'] = self.calculate_page_stats(result['requests']) | |
- self.check_optimization(task, result['requests'], result['pageData']) | |
- devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') | |
- with gzip.open(devtools_file, 'wb', 7) as f_out: | |
+ result["requests"] = self.process_raw_requests() | |
+ result["pageData"] = self.calculate_page_stats(result["requests"]) | |
+ self.check_optimization(task, result["requests"], result["pageData"]) | |
+ devtools_file = os.path.join( | |
+ task["dir"], task["prefix"] + "_devtools_requests.json.gz" | |
+ ) | |
+ with gzip.open(devtools_file, "wb", 7) as f_out: | |
json.dump(result, f_out) | |
def process_sockets(self): | |
"""Map/claim the DNS and socket-connection level details""" | |
# Fill in the host and address for any sockets that had a DNS entry | |
# (even if the DNS did not require a lookup) | |
for event_id in self.sockets: | |
if event_id in self.dns: | |
- if 'host' not in self.sockets[event_id] and 'host' in self.dns[event_id]: | |
- self.sockets[event_id]['host'] = self.dns[event_id]['host'] | |
- if 'addresses' in self.dns[event_id]: | |
- self.sockets[event_id]['addresses'] = self.dns[event_id]['addresses'] | |
- if 'addrIndex' in self.sockets[event_id]: | |
- index = self.sockets[event_id]['addrIndex'] | |
- if index < len(self.dns[event_id]['addresses']): | |
- self.sockets[event_id]['address'] = \ | |
- self.dns[event_id]['addresses'][index] | |
+ if ( | |
+ "host" not in self.sockets[event_id] | |
+ and "host" in self.dns[event_id] | |
+ ): | |
+ self.sockets[event_id]["host"] = self.dns[event_id]["host"] | |
+ if "addresses" in self.dns[event_id]: | |
+ self.sockets[event_id]["addresses"] = self.dns[event_id][ | |
+ "addresses" | |
+ ] | |
+ if "addrIndex" in self.sockets[event_id]: | |
+ index = self.sockets[event_id]["addrIndex"] | |
+ if index < len(self.dns[event_id]["addresses"]): | |
+ self.sockets[event_id]["address"] = self.dns[event_id][ | |
+ "addresses" | |
+ ][index] | |
# Copy over the connect and dns timings to the first request on a given | |
# socket. | |
for event_id in self.sockets: | |
try: | |
- if 'requests' in self.sockets[event_id]: | |
+ if "requests" in self.sockets[event_id]: | |
first_request = None | |
first_request_time = None | |
- count = len(self.sockets[event_id]['requests']) | |
+ count = len(self.sockets[event_id]["requests"]) | |
for i in xrange(0, count): | |
- rid = self.sockets[event_id]['requests'][i] | |
- if rid in self.requests and 'start' in self.requests[rid]: | |
- if first_request is None or \ | |
- self.requests[rid]['start'] < first_request_time: | |
+ rid = self.sockets[event_id]["requests"][i] | |
+ if rid in self.requests and "start" in self.requests[rid]: | |
+ if ( | |
+ first_request is None | |
+ or self.requests[rid]["start"] < first_request_time | |
+ ): | |
first_request = rid | |
- first_request_time = self.requests[rid]['start'] | |
+ first_request_time = self.requests[rid]["start"] | |
if first_request is not None: | |
- if 'start' in self.sockets[event_id]: | |
- self.requests[first_request]['connectStart'] = \ | |
- self.sockets[event_id]['start'] | |
- if 'end' in self.sockets[event_id]: | |
- self.requests[first_request]['connectEnd'] = \ | |
- self.sockets[event_id]['end'] | |
- if 'tlsStart' in self.sockets[event_id]: | |
- self.requests[first_request]['tlsStart'] = \ | |
- self.sockets[event_id]['tlsStart'] | |
- if 'tlsEnd' in self.sockets[event_id]: | |
- self.requests[first_request]['tlsEnd'] = \ | |
- self.sockets[event_id]['tlsEnd'] | |
+ if "start" in self.sockets[event_id]: | |
+ self.requests[first_request]["connectStart"] = self.sockets[ | |
+ event_id | |
+ ]["start"] | |
+ if "end" in self.sockets[event_id]: | |
+ self.requests[first_request][ | |
+ "connectEnd" | |
+ ] = self.sockets[event_id]["end"] | |
+ if "tlsStart" in self.sockets[event_id]: | |
+ self.requests[first_request]["tlsStart"] = self.sockets[ | |
+ event_id | |
+ ]["tlsStart"] | |
+ if "tlsEnd" in self.sockets[event_id]: | |
+ self.requests[first_request]["tlsEnd"] = self.sockets[ | |
+ event_id | |
+ ]["tlsEnd"] | |
if event_id in self.dns: | |
- if 'start' in self.dns[event_id]: | |
- self.requests[first_request]['dnsStart'] = \ | |
- self.dns[event_id]['start'] | |
- if 'end' in self.dns[event_id]: | |
- self.requests[first_request]['dnsEnd'] = \ | |
- self.dns[event_id]['end'] | |
+ if "start" in self.dns[event_id]: | |
+ self.requests[first_request]["dnsStart"] = self.dns[ | |
+ event_id | |
+ ]["start"] | |
+ if "end" in self.dns[event_id]: | |
+ self.requests[first_request]["dnsEnd"] = self.dns[ | |
+ event_id | |
+ ]["end"] | |
except Exception: | |
pass | |
def get_empty_request(self, request_id, url): | |
"""Return and empty, initialized request""" | |
parts = urlparse.urlsplit(url) | |
- request = {'type': 3, | |
- 'id': request_id, | |
- 'request_id': request_id, | |
- 'ip_addr': '', | |
- 'full_url': url, | |
- 'is_secure': 1 if parts.scheme == 'https' else 0, | |
- 'method': '', | |
- 'host': parts.netloc, | |
- 'url': parts.path, | |
- 'responseCode': -1, | |
- 'load_start': -1, | |
- 'load_ms': -1, | |
- 'ttfb_ms': -1, | |
- 'dns_start': -1, | |
- 'dns_end': -1, | |
- 'dns_ms': -1, | |
- 'connect_start': -1, | |
- 'connect_end': -1, | |
- 'connect_ms': -1, | |
- 'ssl_start': -1, | |
- 'ssl_end': -1, | |
- 'ssl_ms': -1, | |
- 'bytesIn': 0, | |
- 'bytesOut': 0, | |
- 'objectSize': 0, | |
- 'initiator': '', | |
- 'initiator_line': '', | |
- 'initiator_column': '', | |
- 'server_rtt': None, | |
- 'headers': {'request': [], 'response': []}, | |
- 'score_cache': -1, | |
- 'score_cdn': -1, | |
- 'score_gzip': -1, | |
- 'score_cookies': -1, | |
- 'score_keep-alive': -1, | |
- 'score_minify': -1, | |
- 'score_combine': -1, | |
- 'score_compress': -1, | |
- 'score_etags': -1, | |
- 'gzip_total': None, | |
- 'gzip_save': None, | |
- 'minify_total': None, | |
- 'minify_save': None, | |
- 'image_total': None, | |
- 'image_save': None, | |
- 'cache_time': None, | |
- 'cdn_provider': None, | |
- 'server_count': None, | |
- 'socket': -1 | |
- } | |
+ request = { | |
+ "type": 3, | |
+ "id": request_id, | |
+ "request_id": request_id, | |
+ "ip_addr": "", | |
+ "full_url": url, | |
+ "is_secure": 1 if parts.scheme == "https" else 0, | |
+ "method": "", | |
+ "host": parts.netloc, | |
+ "url": parts.path, | |
+ "responseCode": -1, | |
+ "load_start": -1, | |
+ "load_ms": -1, | |
+ "ttfb_ms": -1, | |
+ "dns_start": -1, | |
+ "dns_end": -1, | |
+ "dns_ms": -1, | |
+ "connect_start": -1, | |
+ "connect_end": -1, | |
+ "connect_ms": -1, | |
+ "ssl_start": -1, | |
+ "ssl_end": -1, | |
+ "ssl_ms": -1, | |
+ "bytesIn": 0, | |
+ "bytesOut": 0, | |
+ "objectSize": 0, | |
+ "initiator": "", | |
+ "initiator_line": "", | |
+ "initiator_column": "", | |
+ "server_rtt": None, | |
+ "headers": {"request": [], "response": []}, | |
+ "score_cache": -1, | |
+ "score_cdn": -1, | |
+ "score_gzip": -1, | |
+ "score_cookies": -1, | |
+ "score_keep-alive": -1, | |
+ "score_minify": -1, | |
+ "score_combine": -1, | |
+ "score_compress": -1, | |
+ "score_etags": -1, | |
+ "gzip_total": None, | |
+ "gzip_save": None, | |
+ "minify_total": None, | |
+ "minify_save": None, | |
+ "image_total": None, | |
+ "image_save": None, | |
+ "cache_time": None, | |
+ "cdn_provider": None, | |
+ "server_count": None, | |
+ "socket": -1, | |
+ } | |
if len(parts.query): | |
- request['url'] += '?' + parts.query | |
+ request["url"] += "?" + parts.query | |
return request | |
def get_header_value(self, headers, name): | |
"""Return the value for the given header""" | |
- value = '' | |
+ value = "" | |
name = name.lower() | |
for header in headers: | |
- pos = header.find(':') | |
+ pos = header.find(":") | |
if pos > 0: | |
key = header[0:pos].lower() | |
if key.startswith(name): | |
- val = header[pos + 1:].strip() | |
+ val = header[pos + 1 :].strip() | |
if len(value): | |
- value += '; ' | |
+ value += "; " | |
value += val | |
return value | |
def process_raw_requests(self): | |
"""Convert the requests into the format WPT is expecting""" | |
import zipfile | |
+ | |
requests = [] | |
bodies_zip_file = None | |
body_index = 0 | |
- if 'bodies' in self.job and self.job['bodies']: | |
- bodies_zip_path = os.path.join(self.task['dir'], \ | |
- self.task['prefix'] + '_bodies.zip') | |
- bodies_zip_file = zipfile.ZipFile(bodies_zip_path, 'w', zipfile.ZIP_DEFLATED) | |
+ if "bodies" in self.job and self.job["bodies"]: | |
+ bodies_zip_path = os.path.join( | |
+ self.task["dir"], self.task["prefix"] + "_bodies.zip" | |
+ ) | |
+ bodies_zip_file = zipfile.ZipFile( | |
+ bodies_zip_path, "w", zipfile.ZIP_DEFLATED | |
+ ) | |
for req_id in self.requests: | |
try: | |
req = self.requests[req_id] | |
- if 'start' in req and 'url' in req and \ | |
- not req['url'].startswith("https://www.bing.com/cortanaassist/gping"): | |
- request = self.get_empty_request(req['id'], req['url']) | |
- if 'verb' in req: | |
- request['method'] = req['verb'] | |
- if 'status' in req: | |
- request['responseCode'] = req['status'] | |
- request['status'] = req['status'] | |
- if 'protocol' in req: | |
- request['protocol'] = req['protocol'] | |
- if 'created' in req: | |
- request['created'] = req['created'] | |
- if 'start' in req: | |
- request['load_start'] = int(round(req['start'])) | |
- if 'firstByte' in req: | |
- ttfb = int(round(req['firstByte'] - req['start'])) | |
- request['ttfb_ms'] = max(0, ttfb) | |
- if 'end' in req: | |
- load_time = int(round(req['end'] - req['start'])) | |
- request['load_ms'] = max(0, load_time) | |
- if 'dnsStart' in req: | |
- request['dns_start'] = int(round(req['dnsStart'])) | |
- if 'dnsEnd' in req: | |
- request['dns_end'] = int(round(req['dnsEnd'])) | |
- if 'connectStart' in req: | |
- request['connect_start'] = int(round(req['connectStart'])) | |
- if 'connectEnd' in req: | |
- request['connect_end'] = int(round(req['connectEnd'])) | |
- if 'tlsStart' in req: | |
- request['ssl_start'] = int(round(req['tlsStart'])) | |
- if 'tlsEnd' in req: | |
- request['ssl_end'] = int(round(req['tlsEnd'])) | |
- if 'inBytes' in req: | |
- request['bytesIn'] = req['inBytes'] | |
- request['objectSize'] = req['inBytes'] | |
- if 'chunks' in req: | |
- request['chunks'] = req['chunks'] | |
- if 'outBytes' in req: | |
- request['bytesOut'] = req['outBytes'] | |
- if 'connection' in req: | |
- connect_id = req['connection'] | |
+ if ( | |
+ "start" in req | |
+ and "url" in req | |
+ and not req["url"].startswith( | |
+ "https://www.bing.com/cortanaassist/gping" | |
+ ) | |
+ ): | |
+ request = self.get_empty_request(req["id"], req["url"]) | |
+ if "verb" in req: | |
+ request["method"] = req["verb"] | |
+ if "status" in req: | |
+ request["responseCode"] = req["status"] | |
+ request["status"] = req["status"] | |
+ if "protocol" in req: | |
+ request["protocol"] = req["protocol"] | |
+ if "created" in req: | |
+ request["created"] = req["created"] | |
+ if "start" in req: | |
+ request["load_start"] = int(round(req["start"])) | |
+ if "firstByte" in req: | |
+ ttfb = int(round(req["firstByte"] - req["start"])) | |
+ request["ttfb_ms"] = max(0, ttfb) | |
+ if "end" in req: | |
+ load_time = int(round(req["end"] - req["start"])) | |
+ request["load_ms"] = max(0, load_time) | |
+ if "dnsStart" in req: | |
+ request["dns_start"] = int(round(req["dnsStart"])) | |
+ if "dnsEnd" in req: | |
+ request["dns_end"] = int(round(req["dnsEnd"])) | |
+ if "connectStart" in req: | |
+ request["connect_start"] = int(round(req["connectStart"])) | |
+ if "connectEnd" in req: | |
+ request["connect_end"] = int(round(req["connectEnd"])) | |
+ if "tlsStart" in req: | |
+ request["ssl_start"] = int(round(req["tlsStart"])) | |
+ if "tlsEnd" in req: | |
+ request["ssl_end"] = int(round(req["tlsEnd"])) | |
+ if "inBytes" in req: | |
+ request["bytesIn"] = req["inBytes"] | |
+ request["objectSize"] = req["inBytes"] | |
+ if "chunks" in req: | |
+ request["chunks"] = req["chunks"] | |
+ if "outBytes" in req: | |
+ request["bytesOut"] = req["outBytes"] | |
+ if "connection" in req: | |
+ connect_id = req["connection"] | |
if connect_id not in self.sockets: | |
- self.sockets[connect_id] = {'index': len(self.sockets)} | |
- request['socket'] = self.sockets[connect_id]['index'] | |
- if 'address' in self.sockets[connect_id]: | |
- request['ip_addr'] = self.sockets[connect_id]['address'] | |
+ self.sockets[connect_id] = {"index": len(self.sockets)} | |
+ request["socket"] = self.sockets[connect_id]["index"] | |
+ if "address" in self.sockets[connect_id]: | |
+ request["ip_addr"] = self.sockets[connect_id]["address"] | |
# Process the headers | |
- if 'outHeaders' in req: | |
- for header in req['outHeaders'].splitlines(): | |
+ if "outHeaders" in req: | |
+ for header in req["outHeaders"].splitlines(): | |
if len(header): | |
- request['headers']['request'].append(header) | |
+ request["headers"]["request"].append(header) | |
# key: value format for the optimization checks | |
- request['request_headers'] = {} | |
- for header in request['headers']['request']: | |
+ request["request_headers"] = {} | |
+ for header in request["headers"]["request"]: | |
split_pos = header.find(":", 1) | |
if split_pos > 1: | |
name = header[:split_pos].strip() | |
- value = header[split_pos + 1:].strip() | |
+ value = header[split_pos + 1 :].strip() | |
if len(name) and len(value): | |
- if name in request['request_headers']: | |
- request['request_headers'][name] += "\r\n" + value | |
+ if name in request["request_headers"]: | |
+ request["request_headers"][name] += ( | |
+ "\r\n" + value | |
+ ) | |
else: | |
- request['request_headers'][name] = value | |
- if 'inHeaders' in req: | |
- for header in req['inHeaders'].splitlines(): | |
+ request["request_headers"][name] = value | |
+ if "inHeaders" in req: | |
+ for header in req["inHeaders"].splitlines(): | |
if len(header): | |
- request['headers']['response'].append(header) | |
+ request["headers"]["response"].append(header) | |
# key: value format for the optimization checks | |
- request['response_headers'] = {} | |
- for header in request['headers']['response']: | |
+ request["response_headers"] = {} | |
+ for header in request["headers"]["response"]: | |
split_pos = header.find(":", 1) | |
if split_pos > 1: | |
name = header[:split_pos].strip() | |
- value = header[split_pos + 1:].strip() | |
+ value = header[split_pos + 1 :].strip() | |
if len(name) and len(value): | |
- if name in request['response_headers']: | |
- request['response_headers'][name] += "\r\n" + value | |
+ if name in request["response_headers"]: | |
+ request["response_headers"][name] += ( | |
+ "\r\n" + value | |
+ ) | |
else: | |
- request['response_headers'][name] = value | |
- value = self.get_header_value(request['headers']['response'], 'Expires') | |
+ request["response_headers"][name] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Expires" | |
+ ) | |
if value: | |
- request['expires'] = value | |
- value = self.get_header_value(request['headers']['response'], 'Cache-Control') | |
+ request["expires"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Cache-Control" | |
+ ) | |
if value: | |
- request['cacheControl'] = value | |
- value = self.get_header_value(request['headers']['response'], 'Content-Type') | |
+ request["cacheControl"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Content-Type" | |
+ ) | |
if value: | |
- request['contentType'] = value | |
- value = self.get_header_value(request['headers']['response'], | |
- 'Content-Encoding') | |
+ request["contentType"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Content-Encoding" | |
+ ) | |
if value: | |
- request['contentEncoding'] = value | |
- value = self.get_header_value(request['headers']['response'], 'Content-Length') | |
+ request["contentEncoding"] = value | |
+ value = self.get_header_value( | |
+ request["headers"]["response"], "Content-Length" | |
+ ) | |
if value: | |
- if 'objectSize' not in request or value < request['objectSize']: | |
- request['objectSize'] = value | |
+ if "objectSize" not in request or value < request["objectSize"]: | |
+ request["objectSize"] = value | |
# process the response body | |
body_file = os.path.join(self.bodies_path, req_id) | |
if os.path.isfile(body_file): | |
- request['body'] = body_file | |
- request['objectSizeUncompressed'] = os.path.getsize(body_file) | |
+ request["body"] = body_file | |
+ request["objectSizeUncompressed"] = os.path.getsize(body_file) | |
is_text = False | |
- if 'contentType' in request and 'responseCode' in request and \ | |
- request['responseCode'] == 200: | |
- if request['contentType'].startswith('text/') or \ | |
- request['contentType'].find('javascript') >= 0 or \ | |
- request['contentType'].find('json') >= 0 or \ | |
- request['contentType'].find('xml') >= 0 or\ | |
- request['contentType'].find('/svg') >= 0: | |
+ if ( | |
+ "contentType" in request | |
+ and "responseCode" in request | |
+ and request["responseCode"] == 200 | |
+ ): | |
+ if ( | |
+ request["contentType"].startswith("text/") | |
+ or request["contentType"].find("javascript") >= 0 | |
+ or request["contentType"].find("json") >= 0 | |
+ or request["contentType"].find("xml") >= 0 | |
+ or request["contentType"].find("/svg") >= 0 | |
+ ): | |
is_text = True | |
if bodies_zip_file is not None and is_text: | |
body_index += 1 | |
- name = '{0:03d}-{1}-body.txt'.format(body_index, request['id']) | |
+ name = "{0:03d}-{1}-body.txt".format( | |
+ body_index, request["id"] | |
+ ) | |
bodies_zip_file.write(body_file, name) | |
- request['body_id'] = request['id'] | |
- logging.debug('%s: Stored body in zip for (%s)', | |
- request['id'], request['url']) | |
+ request["body_id"] = request["id"] | |
+ logging.debug( | |
+ "%s: Stored body in zip for (%s)", | |
+ request["id"], | |
+ request["url"], | |
+ ) | |
requests.append(request) | |
except Exception: | |
pass | |
if bodies_zip_file is not None: | |
bodies_zip_file.close() | |
- requests.sort(key=lambda x: x['load_start']) | |
+ requests.sort(key=lambda x: x["load_start"]) | |
return requests | |
def check_optimization(self, task, requests, page_data): | |
"""Run the optimization checks""" | |
# build an dictionary of the requests | |
opt_requests = {} | |
for request in requests: | |
- opt_requests[request['id']] = request | |
+ opt_requests[request["id"]] = request | |
optimization = OptimizationChecks(self.job, task, opt_requests) | |
optimization.start() | |
optimization.join() | |
# remove the temporary entries we added | |
for request in requests: | |
- if 'response_headers' in request: | |
- del request['response_headers'] | |
- if 'request_headers' in request: | |
- del request['request_headers'] | |
- if 'body' in request: | |
- del request['body'] | |
- if 'response_body' in request: | |
- del request['response_body'] | |
+ if "response_headers" in request: | |
+ del request["response_headers"] | |
+ if "request_headers" in request: | |
+ del request["request_headers"] | |
+ if "body" in request: | |
+ del request["body"] | |
+ if "response_body" in request: | |
+ del request["response_body"] | |
# merge the optimization results | |
- optimization_file = os.path.join(self.task['dir'], self.task['prefix']) + \ | |
- '_optimization.json.gz' | |
+ optimization_file = ( | |
+ os.path.join(self.task["dir"], self.task["prefix"]) | |
+ + "_optimization.json.gz" | |
+ ) | |
if os.path.isfile(optimization_file): | |
- with gzip.open(optimization_file, 'rb') as f_in: | |
+ with gzip.open(optimization_file, "rb") as f_in: | |
optimization_results = json.load(f_in) | |
- page_data['score_cache'] = -1 | |
- page_data['score_cdn'] = -1 | |
- page_data['score_gzip'] = -1 | |
- page_data['score_cookies'] = -1 | |
- page_data['score_keep-alive'] = -1 | |
- page_data['score_minify'] = -1 | |
- page_data['score_combine'] = -1 | |
- page_data['score_compress'] = -1 | |
- page_data['score_etags'] = -1 | |
- page_data['score_progressive_jpeg'] = -1 | |
- page_data['gzip_total'] = 0 | |
- page_data['gzip_savings'] = 0 | |
- page_data['minify_total'] = -1 | |
- page_data['minify_savings'] = -1 | |
- page_data['image_total'] = 0 | |
- page_data['image_savings'] = 0 | |
- page_data['optimization_checked'] = 1 | |
- page_data['base_page_cdn'] = '' | |
+ page_data["score_cache"] = -1 | |
+ page_data["score_cdn"] = -1 | |
+ page_data["score_gzip"] = -1 | |
+ page_data["score_cookies"] = -1 | |
+ page_data["score_keep-alive"] = -1 | |
+ page_data["score_minify"] = -1 | |
+ page_data["score_combine"] = -1 | |
+ page_data["score_compress"] = -1 | |
+ page_data["score_etags"] = -1 | |
+ page_data["score_progressive_jpeg"] = -1 | |
+ page_data["gzip_total"] = 0 | |
+ page_data["gzip_savings"] = 0 | |
+ page_data["minify_total"] = -1 | |
+ page_data["minify_savings"] = -1 | |
+ page_data["image_total"] = 0 | |
+ page_data["image_savings"] = 0 | |
+ page_data["optimization_checked"] = 1 | |
+ page_data["base_page_cdn"] = "" | |
cache_count = 0 | |
cache_total = 0 | |
cdn_count = 0 | |
cdn_total = 0 | |
keep_alive_count = 0 | |
keep_alive_total = 0 | |
progressive_total_bytes = 0 | |
progressive_bytes = 0 | |
for request in requests: | |
- if request['responseCode'] == 200: | |
- request_id = str(request['id']) | |
+ if request["responseCode"] == 200: | |
+ request_id = str(request["id"]) | |
if request_id in optimization_results: | |
opt = optimization_results[request_id] | |
- if 'cache' in opt: | |
- request['score_cache'] = opt['cache']['score'] | |
- request['cache_time'] = opt['cache']['time'] | |
- if request['score_cache'] >= 0: | |
+ if "cache" in opt: | |
+ request["score_cache"] = opt["cache"]["score"] | |
+ request["cache_time"] = opt["cache"]["time"] | |
+ if request["score_cache"] >= 0: | |
cache_count += 1 | |
- cache_total += request['score_cache'] | |
- if 'cdn' in opt: | |
- request['score_cdn'] = opt['cdn']['score'] | |
- request['cdn_provider'] = opt['cdn']['provider'] | |
- if request['score_cdn'] >= 0: | |
+ cache_total += request["score_cache"] | |
+ if "cdn" in opt: | |
+ request["score_cdn"] = opt["cdn"]["score"] | |
+ request["cdn_provider"] = opt["cdn"]["provider"] | |
+ if request["score_cdn"] >= 0: | |
cdn_count += 1 | |
- cdn_total += request['score_cdn'] | |
- if 'is_base_page' in request and request['is_base_page'] and \ | |
- request['cdn_provider'] is not None: | |
- page_data['base_page_cdn'] = request['cdn_provider'] | |
- if 'keep_alive' in opt: | |
- request['score_keep-alive'] = opt['keep_alive']['score'] | |
- if request['score_keep-alive'] >= 0: | |
+ cdn_total += request["score_cdn"] | |
+ if ( | |
+ "is_base_page" in request | |
+ and request["is_base_page"] | |
+ and request["cdn_provider"] is not None | |
+ ): | |
+ page_data["base_page_cdn"] = request["cdn_provider"] | |
+ if "keep_alive" in opt: | |
+ request["score_keep-alive"] = opt["keep_alive"]["score"] | |
+ if request["score_keep-alive"] >= 0: | |
keep_alive_count += 1 | |
- keep_alive_total += request['score_keep-alive'] | |
- if 'gzip' in opt: | |
- savings = opt['gzip']['size'] - opt['gzip']['target_size'] | |
- request['score_gzip'] = opt['gzip']['score'] | |
- request['gzip_total'] = opt['gzip']['size'] | |
- request['gzip_save'] = savings | |
- if request['score_gzip'] >= 0: | |
- page_data['gzip_total'] += opt['gzip']['size'] | |
- page_data['gzip_savings'] += savings | |
- if 'image' in opt: | |
- savings = opt['image']['size'] - opt['image']['target_size'] | |
- request['score_compress'] = opt['image']['score'] | |
- request['image_total'] = opt['image']['size'] | |
- request['image_save'] = savings | |
- if request['score_compress'] >= 0: | |
- page_data['image_total'] += opt['image']['size'] | |
- page_data['image_savings'] += savings | |
- if 'progressive' in opt: | |
- size = opt['progressive']['size'] | |
- request['jpeg_scan_count'] = opt['progressive']['scan_count'] | |
+ keep_alive_total += request["score_keep-alive"] | |
+ if "gzip" in opt: | |
+ savings = opt["gzip"]["size"] - opt["gzip"]["target_size"] | |
+ request["score_gzip"] = opt["gzip"]["score"] | |
+ request["gzip_total"] = opt["gzip"]["size"] | |
+ request["gzip_save"] = savings | |
+ if request["score_gzip"] >= 0: | |
+ page_data["gzip_total"] += opt["gzip"]["size"] | |
+ page_data["gzip_savings"] += savings | |
+ if "image" in opt: | |
+ savings = opt["image"]["size"] - opt["image"]["target_size"] | |
+ request["score_compress"] = opt["image"]["score"] | |
+ request["image_total"] = opt["image"]["size"] | |
+ request["image_save"] = savings | |
+ if request["score_compress"] >= 0: | |
+ page_data["image_total"] += opt["image"]["size"] | |
+ page_data["image_savings"] += savings | |
+ if "progressive" in opt: | |
+ size = opt["progressive"]["size"] | |
+ request["jpeg_scan_count"] = opt["progressive"][ | |
+ "scan_count" | |
+ ] | |
progressive_total_bytes += size | |
- if request['jpeg_scan_count'] > 1: | |
- request['score_progressive_jpeg'] = 100 | |
+ if request["jpeg_scan_count"] > 1: | |
+ request["score_progressive_jpeg"] = 100 | |
progressive_bytes += size | |
elif size < 10240: | |
- request['score_progressive_jpeg'] = 50 | |
+ request["score_progressive_jpeg"] = 50 | |
else: | |
- request['score_progressive_jpeg'] = 0 | |
+ request["score_progressive_jpeg"] = 0 | |
if cache_count > 0: | |
- page_data['score_cache'] = int(round(cache_total / cache_count)) | |
+ page_data["score_cache"] = int(round(cache_total / cache_count)) | |
if cdn_count > 0: | |
- page_data['score_cdn'] = int(round(cdn_total / cdn_count)) | |
+ page_data["score_cdn"] = int(round(cdn_total / cdn_count)) | |
if keep_alive_count > 0: | |
- page_data['score_keep-alive'] = int(round(keep_alive_total / keep_alive_count)) | |
- if page_data['gzip_total'] > 0: | |
- page_data['score_gzip'] = 100 - int(page_data['gzip_savings'] * 100 / | |
- page_data['gzip_total']) | |
- if page_data['image_total'] > 0: | |
- page_data['score_compress'] = 100 - int(page_data['image_savings'] * 100 / | |
- page_data['image_total']) | |
+ page_data["score_keep-alive"] = int( | |
+ round(keep_alive_total / keep_alive_count) | |
+ ) | |
+ if page_data["gzip_total"] > 0: | |
+ page_data["score_gzip"] = 100 - int( | |
+ page_data["gzip_savings"] * 100 / page_data["gzip_total"] | |
+ ) | |
+ if page_data["image_total"] > 0: | |
+ page_data["score_compress"] = 100 - int( | |
+ page_data["image_savings"] * 100 / page_data["image_total"] | |
+ ) | |
if progressive_total_bytes > 0: | |
- page_data['score_progressive_jpeg'] = int(round(progressive_bytes * 100 / | |
- progressive_total_bytes)) | |
+ page_data["score_progressive_jpeg"] = int( | |
+ round(progressive_bytes * 100 / progressive_total_bytes) | |
+ ) | |
def calculate_page_stats(self, requests): | |
"""Calculate the page-level stats""" | |
- page = {'loadTime': 0, | |
- 'docTime': 0, | |
- 'fullyLoaded': 0, | |
- 'bytesOut': 0, | |
- 'bytesOutDoc': 0, | |
- 'bytesIn': 0, | |
- 'bytesInDoc': 0, | |
- 'requests': len(requests), | |
- 'requestsDoc': 0, | |
- 'responses_200': 0, | |
- 'responses_404': 0, | |
- 'responses_other': 0, | |
- 'result': 0, | |
- 'testStartOffset': 0, | |
- 'cached': 1 if self.task['cached'] else 0, | |
- 'optimization_checked': 0, | |
- 'connections': 0, | |
- 'start_epoch': int((self.task['start_time'] - \ | |
- datetime.utcfromtimestamp(0)).total_seconds()) | |
- } | |
- if 'loadEventStart' in self.page: | |
- page['loadTime'] = int(round(self.page['loadEventStart'])) | |
- page['docTime'] = page['loadTime'] | |
- page['fullyLoaded'] = page['loadTime'] | |
- page['loadEventStart'] = page['loadTime'] | |
- page['loadEventEnd'] = page['loadTime'] | |
- if 'loadEventEnd' in self.page: | |
- page['loadEventEnd'] = int(round(self.page['loadEventEnd'])) | |
- if 'domContentLoadedEventStart' in self.page: | |
- page['domContentLoadedEventStart'] = int(round(self.page['domContentLoadedEventStart'])) | |
- page['domContentLoadedEventEnd'] = page['domContentLoadedEventStart'] | |
- if 'domContentLoadedEventEnd' in self.page: | |
- page['domContentLoadedEventEnd'] = int(round(self.page['domContentLoadedEventEnd'])) | |
+ page = { | |
+ "loadTime": 0, | |
+ "docTime": 0, | |
+ "fullyLoaded": 0, | |
+ "bytesOut": 0, | |
+ "bytesOutDoc": 0, | |
+ "bytesIn": 0, | |
+ "bytesInDoc": 0, | |
+ "requests": len(requests), | |
+ "requestsDoc": 0, | |
+ "responses_200": 0, | |
+ "responses_404": 0, | |
+ "responses_other": 0, | |
+ "result": 0, | |
+ "testStartOffset": 0, | |
+ "cached": 1 if self.task["cached"] else 0, | |
+ "optimization_checked": 0, | |
+ "connections": 0, | |
+ "start_epoch": int( | |
+ (self.task["start_time"] - datetime.utcfromtimestamp(0)).total_seconds() | |
+ ), | |
+ } | |
+ if "loadEventStart" in self.page: | |
+ page["loadTime"] = int(round(self.page["loadEventStart"])) | |
+ page["docTime"] = page["loadTime"] | |
+ page["fullyLoaded"] = page["loadTime"] | |
+ page["loadEventStart"] = page["loadTime"] | |
+ page["loadEventEnd"] = page["loadTime"] | |
+ if "loadEventEnd" in self.page: | |
+ page["loadEventEnd"] = int(round(self.page["loadEventEnd"])) | |
+ if "domContentLoadedEventStart" in self.page: | |
+ page["domContentLoadedEventStart"] = int( | |
+ round(self.page["domContentLoadedEventStart"]) | |
+ ) | |
+ page["domContentLoadedEventEnd"] = page["domContentLoadedEventStart"] | |
+ if "domContentLoadedEventEnd" in self.page: | |
+ page["domContentLoadedEventEnd"] = int( | |
+ round(self.page["domContentLoadedEventEnd"]) | |
+ ) | |
connections = {} | |
main_request = None | |
index = 0 | |
for request in requests: | |
- if 'socket' in request and request['socket'] not in connections: | |
- connections[request['socket']] = request['id'] | |
- if request['load_ms'] >= 0: | |
- end_time = request['load_start'] + request['load_ms'] | |
- if end_time > page['fullyLoaded']: | |
- page['fullyLoaded'] = end_time | |
- if end_time <= page['loadTime']: | |
- page['requestsDoc'] += 1 | |
- page['bytesInDoc'] += request['bytesIn'] | |
- page['bytesOutDoc'] += request['bytesOut'] | |
- page['bytesIn'] += request['bytesIn'] | |
- page['bytesOut'] += request['bytesOut'] | |
- if request['responseCode'] == 200: | |
- page['responses_200'] += 1 | |
- elif request['responseCode'] == 404: | |
- page['responses_404'] += 1 | |
- page['result'] = 99999 | |
- elif request['responseCode'] > -1: | |
- page['responses_other'] += 1 | |
- if main_request is None and \ | |
- (request['responseCode'] == 200 or \ | |
- request['responseCode'] == 304 or \ | |
- request['responseCode'] >= 400): | |
- main_request = request['id'] | |
- request['is_base_page'] = True | |
- page['final_base_page_request'] = index | |
- page['final_base_page_request_id'] = main_request | |
- page['final_url'] = request['full_url'] | |
- if 'URL' not in self.task['page_data']: | |
- self.task['page_data']['URL'] = page['final_url'] | |
- if request['ttfb_ms'] >= 0: | |
- page['TTFB'] = request['load_start'] + request['ttfb_ms'] | |
- if request['ssl_end'] >= request['ssl_start'] and \ | |
- request['ssl_start'] >= 0: | |
- page['basePageSSLTime'] = int(round(request['ssl_end'] - \ | |
- request['ssl_start'])) | |
- if request['responseCode'] >= 400: | |
- page['result'] = request['responseCode'] | |
- if (page['result'] == 0 or page['result'] == 99999) and \ | |
- page['responses_200'] == 0 and len(requests): | |
- if 'responseCode' in requests[0]: | |
- page['result'] = requests[0]['responseCode'] | |
+ if "socket" in request and request["socket"] not in connections: | |
+ connections[request["socket"]] = request["id"] | |
+ if request["load_ms"] >= 0: | |
+ end_time = request["load_start"] + request["load_ms"] | |
+ if end_time > page["fullyLoaded"]: | |
+ page["fullyLoaded"] = end_time | |
+ if end_time <= page["loadTime"]: | |
+ page["requestsDoc"] += 1 | |
+ page["bytesInDoc"] += request["bytesIn"] | |
+ page["bytesOutDoc"] += request["bytesOut"] | |
+ page["bytesIn"] += request["bytesIn"] | |
+ page["bytesOut"] += request["bytesOut"] | |
+ if request["responseCode"] == 200: | |
+ page["responses_200"] += 1 | |
+ elif request["responseCode"] == 404: | |
+ page["responses_404"] += 1 | |
+ page["result"] = 99999 | |
+ elif request["responseCode"] > -1: | |
+ page["responses_other"] += 1 | |
+ if main_request is None and ( | |
+ request["responseCode"] == 200 | |
+ or request["responseCode"] == 304 | |
+ or request["responseCode"] >= 400 | |
+ ): | |
+ main_request = request["id"] | |
+ request["is_base_page"] = True | |
+ page["final_base_page_request"] = index | |
+ page["final_base_page_request_id"] = main_request | |
+ page["final_url"] = request["full_url"] | |
+ if "URL" not in self.task["page_data"]: | |
+ self.task["page_data"]["URL"] = page["final_url"] | |
+ if request["ttfb_ms"] >= 0: | |
+ page["TTFB"] = request["load_start"] + request["ttfb_ms"] | |
+ if ( | |
+ request["ssl_end"] >= request["ssl_start"] | |
+ and request["ssl_start"] >= 0 | |
+ ): | |
+ page["basePageSSLTime"] = int( | |
+ round(request["ssl_end"] - request["ssl_start"]) | |
+ ) | |
+ if request["responseCode"] >= 400: | |
+ page["result"] = request["responseCode"] | |
+ if ( | |
+ (page["result"] == 0 or page["result"] == 99999) | |
+ and page["responses_200"] == 0 | |
+ and len(requests) | |
+ ): | |
+ if "responseCode" in requests[0]: | |
+ page["result"] = requests[0]["responseCode"] | |
else: | |
- page['result'] = 12999 | |
- page['connections'] = len(connections) | |
- self.task['page_result'] = page['result'] | |
+ page["result"] = 12999 | |
+ page["connections"] = len(connections) | |
+ self.task["page_result"] = page["result"] | |
return page | |
--- ws4py/client/geventclient.py 2018-09-21 20:15:22.161190 +0000 | |
+++ ws4py/client/geventclient.py 2019-02-06 17:08:29.558437 +0000 | |
@@ -5,14 +5,23 @@ | |
from gevent import Greenlet | |
from gevent.queue import Queue | |
from ws4py.client import WebSocketBaseClient | |
-__all__ = ['WebSocketClient'] | |
+__all__ = ["WebSocketClient"] | |
+ | |
class WebSocketClient(WebSocketBaseClient): | |
- def __init__(self, url, protocols=None, extensions=None, heartbeat_freq=None, ssl_options=None, headers=None): | |
+ def __init__( | |
+ self, | |
+ url, | |
+ protocols=None, | |
+ extensions=None, | |
+ heartbeat_freq=None, | |
+ ssl_options=None, | |
+ headers=None, | |
+ ): | |
""" | |
WebSocket client that executes the | |
:meth:`run() <ws4py.websocket.WebSocket.run>` into a gevent greenlet. | |
.. code-block:: python | |
@@ -38,12 +47,19 @@ | |
gevent.spawn(incoming), | |
gevent.spawn(outgoing), | |
] | |
gevent.joinall(greenlets) | |
""" | |
- WebSocketBaseClient.__init__(self, url, protocols, extensions, heartbeat_freq, | |
- ssl_options=ssl_options, headers=headers) | |
+ WebSocketBaseClient.__init__( | |
+ self, | |
+ url, | |
+ protocols, | |
+ extensions, | |
+ heartbeat_freq, | |
+ ssl_options=ssl_options, | |
+ headers=headers, | |
+ ) | |
self._th = Greenlet(self.run) | |
self.messages = Queue() | |
""" | |
Queue that will hold received messages. | |
--- ws4py/client/threadedclient.py 2018-09-21 20:15:22.161309 +0000 | |
+++ ws4py/client/threadedclient.py 2019-02-06 17:08:29.645150 +0000 | |
@@ -1,15 +1,23 @@ | |
# -*- coding: utf-8 -*- | |
import threading | |
from ws4py.client import WebSocketBaseClient | |
-__all__ = ['WebSocketClient'] | |
+__all__ = ["WebSocketClient"] | |
+ | |
class WebSocketClient(WebSocketBaseClient): | |
- def __init__(self, url, protocols=None, extensions=None, heartbeat_freq=None, | |
- ssl_options=None, headers=None): | |
+ def __init__( | |
+ self, | |
+ url, | |
+ protocols=None, | |
+ extensions=None, | |
+ heartbeat_freq=None, | |
+ ssl_options=None, | |
+ headers=None, | |
+ ): | |
""" | |
.. code-block:: python | |
from ws4py.client.threadedclient import WebSocketClient | |
@@ -29,13 +37,20 @@ | |
ws.connect() | |
except KeyboardInterrupt: | |
ws.close() | |
""" | |
- WebSocketBaseClient.__init__(self, url, protocols, extensions, heartbeat_freq, | |
- ssl_options, headers=headers) | |
- self._th = threading.Thread(target=self.run, name='WebSocketClient') | |
+ WebSocketBaseClient.__init__( | |
+ self, | |
+ url, | |
+ protocols, | |
+ extensions, | |
+ heartbeat_freq, | |
+ ssl_options, | |
+ headers=headers, | |
+ ) | |
+ self._th = threading.Thread(target=self.run, name="WebSocketClient") | |
self._th.daemon = True | |
@property | |
def daemon(self): | |
""" | |
@@ -65,11 +80,12 @@ | |
Starts the client's thread. | |
""" | |
self._th.start() | |
-if __name__ == '__main__': | |
+ | |
+if __name__ == "__main__": | |
from ws4py.client.threadedclient import WebSocketClient | |
class EchoClient(WebSocketClient): | |
def opened(self): | |
def data_provider(): | |
@@ -85,15 +101,18 @@ | |
print(("Closed down", code, reason)) | |
def received_message(self, m): | |
print("#%d" % len(m)) | |
if len(m) == 175: | |
- self.close(reason='bye bye') | |
+ self.close(reason="bye bye") | |
try: | |
- ws = EchoClient('ws://localhost:9000/ws', protocols=['http-only', 'chat'], | |
- headers=[('X-Test', 'hello there')]) | |
+ ws = EchoClient( | |
+ "ws://localhost:9000/ws", | |
+ protocols=["http-only", "chat"], | |
+ headers=[("X-Test", "hello there")], | |
+ ) | |
ws.connect() | |
ws.run_forever() | |
except KeyboardInterrupt: | |
ws.close() | |
--- internal/safari_ios.py 2018-10-05 09:54:21.186436 +0000 | |
+++ internal/safari_ios.py 2019-02-06 17:08:29.734997 +0000 | |
@@ -17,10 +17,11 @@ | |
import monotonic | |
import ujson as json | |
from ws4py.client.threadedclient import WebSocketClient | |
from .optimization_checks import OptimizationChecks | |
from .base_browser import BaseBrowser | |
+ | |
class iWptBrowser(BaseBrowser): | |
"""iOS""" | |
def __init__(self, ios_device, options, job): | |
@@ -53,11 +54,11 @@ | |
self.response_bodies = {} | |
self.bodies_zip_file = None | |
self.body_fail_count = 0 | |
self.body_index = 0 | |
self.last_activity = monotonic.monotonic() | |
- self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'js') | |
+ self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "js") | |
self.path_base = None | |
self.websocket = None | |
self.command_id = 0 | |
self.command_responses = {} | |
self.pending_commands = [] | |
@@ -65,19 +66,25 @@ | |
self.webinspector_proxy = None | |
self.ios_utils_path = None | |
self.ios_version = None | |
plat = platform.system() | |
if plat == "Darwin": | |
- self.ios_utils_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'ios', 'Darwin') | |
+ self.ios_utils_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "ios", "Darwin" | |
+ ) | |
elif plat == "Linux": | |
- if os.uname()[4].startswith('arm'): | |
- self.ios_utils_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'ios', 'arm') | |
+ if os.uname()[4].startswith("arm"): | |
+ self.ios_utils_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support", "ios", "arm" | |
+ ) | |
else: | |
- self.ios_utils_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), | |
- 'support', 'ios', 'Linux64') | |
+ self.ios_utils_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), | |
+ "support", | |
+ "ios", | |
+ "Linux64", | |
+ ) | |
def prepare(self, job, task): | |
"""Prepare the OS for the browser""" | |
self.task = task | |
self.page = {} | |
@@ -89,15 +96,15 @@ | |
self.nav_error = None | |
self.nav_error_code = None | |
self.main_request = None | |
self.ios.notification_queue = self.messages | |
self.ios.stop_browser() | |
- if 'browser' in job and job['browser'].lower().find('landscape') >= 0: | |
+ if "browser" in job and job["browser"].lower().find("landscape") >= 0: | |
self.ios.landscape() | |
else: | |
self.ios.portrait() | |
- if not task['cached']: | |
+ if not task["cached"]: | |
self.clear_profile(task) | |
def clear_profile(self, _): | |
"""Clear the browser profile""" | |
self.ios.clear_cache() | |
@@ -116,57 +123,66 @@ | |
self.connected = False | |
self.flush_messages() | |
self.ios_version = self.ios.get_os_version() | |
if self.ios_utils_path and self.ios.start_browser(): | |
# Start the webinspector proxy | |
- exe = os.path.join(self.ios_utils_path, 'ios_webkit_debug_proxy') | |
- args = [exe, '-F', '-u', self.ios.serial] | |
- logging.debug(' '.join(args)) | |
+ exe = os.path.join(self.ios_utils_path, "ios_webkit_debug_proxy") | |
+ args = [exe, "-F", "-u", self.ios.serial] | |
+ logging.debug(" ".join(args)) | |
self.webinspector_proxy = subprocess.Popen(args) | |
if self.webinspector_proxy: | |
# Connect to the dev tools interface | |
self.connected = self.connect() | |
self.flush_messages() | |
def connect(self, timeout=30): | |
"""Connect to the dev tools interface""" | |
import requests | |
+ | |
proxies = {"http": None, "https": None} | |
ret = False | |
end_time = monotonic.monotonic() + timeout | |
while not ret and monotonic.monotonic() < end_time: | |
try: | |
- response = requests.get("http://localhost:9222/json", timeout=timeout, proxies=proxies) | |
+ response = requests.get( | |
+ "http://localhost:9222/json", timeout=timeout, proxies=proxies | |
+ ) | |
if response.text: | |
tabs = response.json() | |
logging.debug("Dev Tools tabs: %s", json.dumps(tabs)) | |
if tabs: | |
websocket_url = None | |
for index in xrange(len(tabs)): | |
- if 'webSocketDebuggerUrl' in tabs[index]: | |
- websocket_url = tabs[index]['webSocketDebuggerUrl'] | |
+ if "webSocketDebuggerUrl" in tabs[index]: | |
+ websocket_url = tabs[index]["webSocketDebuggerUrl"] | |
break | |
if websocket_url is not None: | |
try: | |
self.websocket = DevToolsClient(websocket_url) | |
self.websocket.messages = self.messages | |
self.websocket.connect() | |
ret = True | |
except Exception as err: | |
- logging.debug("Connect to dev tools websocket Error: %s", | |
- err.__str__()) | |
+ logging.debug( | |
+ "Connect to dev tools websocket Error: %s", | |
+ err.__str__(), | |
+ ) | |
if not ret: | |
# try connecting to 127.0.0.1 instead of localhost | |
try: | |
- websocket_url = websocket_url.replace('localhost', '127.0.0.1') | |
+ websocket_url = websocket_url.replace( | |
+ "localhost", "127.0.0.1" | |
+ ) | |
self.websocket = DevToolsClient(websocket_url) | |
self.websocket.messages = self.messages | |
self.websocket.connect() | |
ret = True | |
except Exception as err: | |
- logging.debug("Connect to dev tools websocket Error: %s", | |
- err.__str__()) | |
+ logging.debug( | |
+ "Connect to dev tools websocket Error: %s", | |
+ err.__str__(), | |
+ ) | |
else: | |
time.sleep(0.5) | |
else: | |
time.sleep(0.5) | |
except Exception as err: | |
@@ -195,45 +211,48 @@ | |
def run_task(self, task): | |
"""Run an individual test""" | |
if self.connected: | |
self.task = task | |
logging.debug("Running test") | |
- end_time = monotonic.monotonic() + task['test_time_limit'] | |
- task['current_step'] = 1 | |
+ end_time = monotonic.monotonic() + task["test_time_limit"] | |
+ task["current_step"] = 1 | |
recording = False | |
- while task['script'] and task['error'] is None and \ | |
- monotonic.monotonic() < end_time: | |
+ while ( | |
+ task["script"] | |
+ and task["error"] is None | |
+ and monotonic.monotonic() < end_time | |
+ ): | |
self.prepare_task(task) | |
- command = task['script'].pop(0) | |
- if not recording and command['record']: | |
+ command = task["script"].pop(0) | |
+ if not recording and command["record"]: | |
recording = True | |
self.on_start_recording(task) | |
try: | |
self.process_command(command) | |
except Exception: | |
logging.exception("Exception running task") | |
- if command['record']: | |
+ if command["record"]: | |
self.wait_for_page_load() | |
- if not task['combine_steps'] or not len(task['script']): | |
+ if not task["combine_steps"] or not len(task["script"]): | |
self.on_stop_capture(task) | |
self.on_stop_recording(task) | |
recording = False | |
self.on_start_processing(task) | |
self.wait_for_processing(task) | |
self.step_complete(task) | |
- if task['log_data']: | |
+ if task["log_data"]: | |
# Move on to the next step | |
- task['current_step'] += 1 | |
+ task["current_step"] += 1 | |
self.event_name = None | |
- task['navigated'] = True | |
+ task["navigated"] = True | |
self.task = None | |
def wait_for_page_load(self): | |
"""Wait for the onload event from the extension""" | |
if self.connected: | |
start_time = monotonic.monotonic() | |
- end_time = start_time + self.task['time_limit'] | |
+ end_time = start_time + self.task["time_limit"] | |
done = False | |
interval = 1 | |
while not done: | |
if self.page_loaded is not None: | |
interval = 0.1 | |
@@ -244,26 +263,31 @@ | |
now = monotonic.monotonic() | |
elapsed_test = now - start_time | |
if self.nav_error is not None: | |
done = True | |
if self.page_loaded is None: | |
- self.task['error'] = self.nav_error | |
+ self.task["error"] = self.nav_error | |
if self.nav_error_code is not None: | |
- self.task['page_data']['result'] = self.nav_error_code | |
+ self.task["page_data"]["result"] = self.nav_error_code | |
else: | |
- self.task['page_data']['result'] = 12999 | |
+ self.task["page_data"]["result"] = 12999 | |
elif now >= end_time: | |
done = True | |
# only consider it an error if we didn't get a page load event | |
if self.page_loaded is None: | |
- self.task['error'] = "Page Load Timeout" | |
- elif 'time' not in self.job or elapsed_test > self.job['time']: | |
+ self.task["error"] = "Page Load Timeout" | |
+ elif "time" not in self.job or elapsed_test > self.job["time"]: | |
elapsed_activity = now - self.last_activity | |
- elapsed_page_load = now - self.page_loaded if self.page_loaded else 0 | |
- if elapsed_page_load >= 1 and elapsed_activity >= self.task['activity_time']: | |
+ elapsed_page_load = ( | |
+ now - self.page_loaded if self.page_loaded else 0 | |
+ ) | |
+ if ( | |
+ elapsed_page_load >= 1 | |
+ and elapsed_activity >= self.task["activity_time"] | |
+ ): | |
done = True | |
- elif self.task['error'] is not None: | |
+ elif self.task["error"] is not None: | |
done = True | |
def execute_js(self, script): | |
"""Run javascipt (stub for overriding""" | |
ret = None | |
@@ -276,404 +300,477 @@ | |
ret = None | |
if self.connected: | |
script = None | |
script_file_path = os.path.join(self.script_dir, file_name) | |
if os.path.isfile(script_file_path): | |
- with open(script_file_path, 'rb') as script_file: | |
+ with open(script_file_path, "rb") as script_file: | |
script = script_file.read() | |
if script is not None: | |
ret = self.ios.execute_js(script) | |
return ret | |
def set_header(self, header): | |
"""Add/modify a header on the outbound requests""" | |
if header is not None and len(header): | |
- separator = header.find(':') | |
+ separator = header.find(":") | |
if separator > 0: | |
name = header[:separator].strip() | |
- value = header[separator + 1:].strip() | |
+ value = header[separator + 1 :].strip() | |
self.headers[name] = value | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, wait=True) | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", {"headers": self.headers}, wait=True | |
+ ) | |
def reset_headers(self): | |
"""Add/modify a header on the outbound requests""" | |
self.headers = {} | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, wait=True) | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", {"headers": self.headers}, wait=True | |
+ ) | |
def collect_browser_metrics(self, task): | |
"""Collect all of the in-page browser metrics that we need""" | |
logging.debug("Collecting user timing metrics") | |
- user_timing = self.run_js_file('user_timing.js') | |
+ user_timing = self.run_js_file("user_timing.js") | |
logging.debug(user_timing) | |
if user_timing is not None and self.path_base is not None: | |
- path = self.path_base + '_timed_events.json.gz' | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = self.path_base + "_timed_events.json.gz" | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(user_timing)) | |
logging.debug("Collecting page-level metrics") | |
- page_data = self.run_js_file('page_data.js') | |
+ page_data = self.run_js_file("page_data.js") | |
logging.debug(page_data) | |
if page_data is not None: | |
- task['page_data'].update(page_data) | |
- if 'customMetrics' in self.job: | |
+ task["page_data"].update(page_data) | |
+ if "customMetrics" in self.job: | |
custom_metrics = {} | |
- for name in self.job['customMetrics']: | |
+ for name in self.job["customMetrics"]: | |
logging.debug("Collecting custom metric %s", name) | |
- script = '(function() {' +\ | |
- self.job['customMetrics'][name] +\ | |
- '})()' | |
+ script = "(function() {" + self.job["customMetrics"][name] + "})()" | |
try: | |
custom_metrics[name] = self.ios.execute_js(script) | |
if custom_metrics[name] is not None: | |
logging.debug(custom_metrics[name]) | |
except Exception: | |
pass | |
- if self.path_base is not None: | |
- path = self.path_base + '_metrics.json.gz' | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ if self.path_base is not None: | |
+ path = self.path_base + "_metrics.json.gz" | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(custom_metrics)) | |
def process_message(self, msg): | |
"""Process a message from the browser | |
https://trac.webkit.org/browser/webkit/trunk/Source/JavaScriptCore/inspector/protocol""" | |
try: | |
- if 'method' in msg and self.recording: | |
- parts = msg['method'].split('.') | |
+ if "method" in msg and self.recording: | |
+ parts = msg["method"].split(".") | |
if len(parts) >= 2: | |
category = parts[0] | |
event = parts[1] | |
- if category == 'Page': | |
+ if category == "Page": | |
self.process_page_event(event, msg) | |
- elif category == 'Network': | |
+ elif category == "Network": | |
self.process_network_event(event, msg) | |
- elif category == 'Inspector': | |
+ elif category == "Inspector": | |
self.process_inspector_event(event) | |
- elif category == 'Timeline': | |
+ elif category == "Timeline": | |
self.process_timeline_event(event, msg) | |
- elif category == 'Console': | |
+ elif category == "Console": | |
self.process_console_event(event, msg) | |
except Exception: | |
pass | |
- if self.timeline and 'method' in msg and self.recording: | |
+ if self.timeline and "method" in msg and self.recording: | |
json.dump(msg, self.timeline) | |
self.timeline.write(",\n") | |
- if 'id' in msg: | |
- response_id = int(re.search(r'\d+', str(msg['id'])).group()) | |
+ if "id" in msg: | |
+ response_id = int(re.search(r"\d+", str(msg["id"])).group()) | |
if response_id in self.pending_commands: | |
self.pending_commands.remove(response_id) | |
self.command_responses[response_id] = msg | |
def process_page_event(self, event, msg): | |
"""Process Page.* dev tools events""" | |
- if 'start' not in self.page and 'params' in msg and 'timestamp' in msg['params']: | |
- self.page['start'] = msg['params']['timestamp'] | |
- if event == 'loadEventFired': | |
+ if ( | |
+ "start" not in self.page | |
+ and "params" in msg | |
+ and "timestamp" in msg["params"] | |
+ ): | |
+ self.page["start"] = msg["params"]["timestamp"] | |
+ if event == "loadEventFired": | |
self.page_loaded = monotonic.monotonic() | |
- self.page['loaded'] = msg['params']['timestamp'] | |
- elif event == 'domContentEventFired': | |
- self.page['DOMContentLoaded'] = msg['params']['timestamp'] | |
- elif event == 'frameStartedLoading': | |
+ self.page["loaded"] = msg["params"]["timestamp"] | |
+ elif event == "domContentEventFired": | |
+ self.page["DOMContentLoaded"] = msg["params"]["timestamp"] | |
+ elif event == "frameStartedLoading": | |
if self.is_navigating and self.main_frame is None: | |
self.is_navigating = False | |
- self.main_frame = msg['params']['frameId'] | |
- if self.main_frame == msg['params']['frameId']: | |
+ self.main_frame = msg["params"]["frameId"] | |
+ if self.main_frame == msg["params"]["frameId"]: | |
logging.debug("Navigating main frame") | |
self.last_activity = monotonic.monotonic() | |
self.page_loaded = None | |
- elif event == 'frameStoppedLoading': | |
- if self.main_frame is not None and \ | |
- not self.page_loaded and \ | |
- self.main_frame == msg['params']['frameId']: | |
+ elif event == "frameStoppedLoading": | |
+ if ( | |
+ self.main_frame is not None | |
+ and not self.page_loaded | |
+ and self.main_frame == msg["params"]["frameId"] | |
+ ): | |
if self.nav_error is not None: | |
- self.task['error'] = self.nav_error | |
+ self.task["error"] = self.nav_error | |
logging.debug("Page load failed: %s", self.nav_error) | |
if self.nav_error_code is not None: | |
- self.task['page_data']['result'] = self.nav_error_code | |
+ self.task["page_data"]["result"] = self.nav_error_code | |
self.page_loaded = monotonic.monotonic() | |
def process_network_event(self, event, msg): | |
"""Process Network.* dev tools events""" | |
- if 'requestId' in msg['params']: | |
+ if "requestId" in msg["params"]: | |
timestamp = None | |
- if 'params' in msg and 'timestamp' in msg['params']: | |
- timestamp = msg['params']['timestamp'] | |
- request_id = msg['params']['requestId'] | |
+ if "params" in msg and "timestamp" in msg["params"]: | |
+ timestamp = msg["params"]["timestamp"] | |
+ request_id = msg["params"]["requestId"] | |
original_request_id = request_id | |
if original_request_id in self.id_map: | |
- request_id = str(original_request_id) + '.' + str(self.id_map[original_request_id]) | |
+ request_id = ( | |
+ str(original_request_id) | |
+ + "." | |
+ + str(self.id_map[original_request_id]) | |
+ ) | |
if request_id not in self.requests: | |
- self.requests[request_id] = {'id': request_id, | |
- 'original_id': original_request_id, | |
- 'bytesIn': 0, | |
- 'objectSize': 0, | |
- 'objectSizeUncompressed': 0, | |
- 'transfer_size': 0, | |
- 'fromNet': False, | |
- 'is_redirect': False} | |
+ self.requests[request_id] = { | |
+ "id": request_id, | |
+ "original_id": original_request_id, | |
+ "bytesIn": 0, | |
+ "objectSize": 0, | |
+ "objectSizeUncompressed": 0, | |
+ "transfer_size": 0, | |
+ "fromNet": False, | |
+ "is_redirect": False, | |
+ } | |
if timestamp: | |
- self.requests[request_id]['created'] = timestamp | |
+ self.requests[request_id]["created"] = timestamp | |
request = self.requests[request_id] | |
- if 'targetId' in msg['params']: | |
- request['targetId'] = msg['params']['targetId'] | |
- ignore_activity = request['is_video'] if 'is_video' in request else False | |
- if event == 'requestWillBeSent': | |
- if 'start' not in self.page and timestamp: | |
- self.page['start'] = timestamp | |
+ if "targetId" in msg["params"]: | |
+ request["targetId"] = msg["params"]["targetId"] | |
+ ignore_activity = request["is_video"] if "is_video" in request else False | |
+ if event == "requestWillBeSent": | |
+ if "start" not in self.page and timestamp: | |
+ self.page["start"] = timestamp | |
# For a redirect, close out the existing request and start a new one | |
- if 'redirectResponse' in msg['params']: | |
- if timestamp and 'start' in request and timestamp > request['start']: | |
- if 'firstByte' not in request or timestamp < request['firstByte']: | |
- request['firstByte'] = timestamp | |
- if 'end' not in request or timestamp > request['end']: | |
- request['end'] = timestamp | |
- request['is_redirect'] = True | |
- response = msg['params']['redirectResponse'] | |
- request['status'] = response['status'] | |
- request['statusText'] = response['statusText'] | |
- request['response_headers'] = response['headers'] | |
- if 'fromDiskCache' in response and response['fromDiskCache']: | |
- request['fromNet'] = False | |
- if 'source' in response and response['source'] not in ['network', 'unknown']: | |
- request['fromNet'] = False | |
- if 'timing' in response: | |
- request['timing'] = response['timing'] | |
+ if "redirectResponse" in msg["params"]: | |
+ if ( | |
+ timestamp | |
+ and "start" in request | |
+ and timestamp > request["start"] | |
+ ): | |
+ if ( | |
+ "firstByte" not in request | |
+ or timestamp < request["firstByte"] | |
+ ): | |
+ request["firstByte"] = timestamp | |
+ if "end" not in request or timestamp > request["end"]: | |
+ request["end"] = timestamp | |
+ request["is_redirect"] = True | |
+ response = msg["params"]["redirectResponse"] | |
+ request["status"] = response["status"] | |
+ request["statusText"] = response["statusText"] | |
+ request["response_headers"] = response["headers"] | |
+ if "fromDiskCache" in response and response["fromDiskCache"]: | |
+ request["fromNet"] = False | |
+ if "source" in response and response["source"] not in [ | |
+ "network", | |
+ "unknown", | |
+ ]: | |
+ request["fromNet"] = False | |
+ if "timing" in response: | |
+ request["timing"] = response["timing"] | |
if original_request_id in self.id_map: | |
self.id_map[original_request_id] += 1 | |
else: | |
self.id_map[original_request_id] = 1 | |
- request_id = str(original_request_id) + '.' + \ | |
- str(self.id_map[original_request_id]) | |
- self.requests[request_id] = {'id': request_id, | |
- 'original_id': original_request_id, | |
- 'bytesIn': 0, | |
- 'objectSize': 0, | |
- 'objectSizeUncompressed': 0, | |
- 'transfer_size': 0, | |
- 'fromNet': False, | |
- 'is_redirect': True} | |
+ request_id = ( | |
+ str(original_request_id) | |
+ + "." | |
+ + str(self.id_map[original_request_id]) | |
+ ) | |
+ self.requests[request_id] = { | |
+ "id": request_id, | |
+ "original_id": original_request_id, | |
+ "bytesIn": 0, | |
+ "objectSize": 0, | |
+ "objectSizeUncompressed": 0, | |
+ "transfer_size": 0, | |
+ "fromNet": False, | |
+ "is_redirect": True, | |
+ } | |
if timestamp: | |
- self.requests[request_id]['created'] = timestamp | |
+ self.requests[request_id]["created"] = timestamp | |
request = self.requests[request_id] | |
if timestamp: | |
- request['start'] = timestamp | |
- request['initiator'] = msg['params']['initiator'] | |
- request['url'] = msg['params']['request']['url'] | |
- request['method'] = msg['params']['request']['method'] | |
- request['request_headers'] = msg['params']['request']['headers'] | |
- if 'type' in msg['params']: | |
- request['type'] = msg['params']['type'] | |
- if request['url'].endswith('.mp4'): | |
- request['is_video'] = True | |
- request['fromNet'] = True | |
- if msg['params']['frameId'] != self.main_frame: | |
- request['frame'] = msg['params']['frameId'] | |
- if self.main_frame is not None and \ | |
- self.main_request is None and \ | |
- msg['params']['frameId'] == self.main_frame: | |
- logging.debug('Main request detected') | |
+ request["start"] = timestamp | |
+ request["initiator"] = msg["params"]["initiator"] | |
+ request["url"] = msg["params"]["request"]["url"] | |
+ request["method"] = msg["params"]["request"]["method"] | |
+ request["request_headers"] = msg["params"]["request"]["headers"] | |
+ if "type" in msg["params"]: | |
+ request["type"] = msg["params"]["type"] | |
+ if request["url"].endswith(".mp4"): | |
+ request["is_video"] = True | |
+ request["fromNet"] = True | |
+ if msg["params"]["frameId"] != self.main_frame: | |
+ request["frame"] = msg["params"]["frameId"] | |
+ if ( | |
+ self.main_frame is not None | |
+ and self.main_request is None | |
+ and msg["params"]["frameId"] == self.main_frame | |
+ ): | |
+ logging.debug("Main request detected") | |
self.main_request = request_id | |
if timestamp: | |
- self.page['start'] = float(msg['params']['timestamp']) | |
- elif event == 'responseReceived': | |
- response = msg['params']['response'] | |
- request['status'] = response['status'] | |
- request['statusText'] = response['statusText'] | |
- request['response_headers'] = response['headers'] | |
- if 'fromDiskCache' in response and response['fromDiskCache']: | |
- request['fromNet'] = False | |
- if 'source' in response and response['source'] not in ['network', 'unknown']: | |
- request['fromNet'] = False | |
- if 'timing' in response: | |
- request['timing'] = response['timing'] | |
- if 'mimeType' in response and response['mimeType'].startswith('video/'): | |
- request['is_video'] = True | |
- if timestamp and 'start' in request and timestamp > request['start']: | |
- if 'firstByte' not in request or timestamp < request['firstByte']: | |
- request['firstByte'] = timestamp | |
- if 'end' not in request or timestamp > request['end']: | |
- request['end'] = timestamp | |
- elif event == 'dataReceived': | |
+ self.page["start"] = float(msg["params"]["timestamp"]) | |
+ elif event == "responseReceived": | |
+ response = msg["params"]["response"] | |
+ request["status"] = response["status"] | |
+ request["statusText"] = response["statusText"] | |
+ request["response_headers"] = response["headers"] | |
+ if "fromDiskCache" in response and response["fromDiskCache"]: | |
+ request["fromNet"] = False | |
+ if "source" in response and response["source"] not in [ | |
+ "network", | |
+ "unknown", | |
+ ]: | |
+ request["fromNet"] = False | |
+ if "timing" in response: | |
+ request["timing"] = response["timing"] | |
+ if "mimeType" in response and response["mimeType"].startswith("video/"): | |
+ request["is_video"] = True | |
+ if timestamp and "start" in request and timestamp > request["start"]: | |
+ if "firstByte" not in request or timestamp < request["firstByte"]: | |
+ request["firstByte"] = timestamp | |
+ if "end" not in request or timestamp > request["end"]: | |
+ request["end"] = timestamp | |
+ elif event == "dataReceived": | |
bytesIn = 0 | |
- if 'encodedDataLength' in msg['params'] and \ | |
- msg['params']['encodedDataLength'] >= 0: | |
- bytesIn = msg['params']['encodedDataLength'] | |
- request['objectSize'] += bytesIn | |
- request['bytesIn'] += bytesIn | |
- request['transfer_size'] += bytesIn | |
- elif 'dataLength' in msg['params'] and msg['params']['dataLength'] >= 0: | |
- bytesIn = msg['params']['dataLength'] | |
- request['objectSize'] += bytesIn | |
- request['bytesIn'] +=bytesIn | |
- request['transfer_size'] += bytesIn | |
- if 'dataLength' in msg['params'] and msg['params']['dataLength'] >= 0: | |
- request['objectSizeUncompressed'] += msg['params']['dataLength'] | |
- if timestamp and 'start' in request and timestamp > request['start']: | |
- if 'chunks' not in request: | |
- request['chunks'] = [] | |
- request['chunks'].append({'ts': timestamp, 'bytes': bytesIn}) | |
- if 'firstByte' not in request or timestamp < request['firstByte']: | |
- request['firstByte'] = timestamp | |
- if 'end' not in request or timestamp > request['end']: | |
- request['end'] = timestamp | |
- elif event == 'loadingFinished': | |
- if timestamp and 'start' in request and timestamp > request['start']: | |
- if 'firstByte' not in request or timestamp < request['firstByte']: | |
- request['firstByte'] = timestamp | |
- if 'end' not in request or timestamp > request['end']: | |
- request['end'] = timestamp | |
- if 'metrics' in msg['params']: | |
- metrics = msg['params']['metrics'] | |
- if 'priority' in metrics: | |
- request['priority'] = metrics['priority'] | |
- if 'protocol' in metrics: | |
- request['protocol'] = metrics['protocol'] | |
- if 'remoteAddress' in metrics: | |
- separator = metrics['remoteAddress'].rfind(':') | |
+ if ( | |
+ "encodedDataLength" in msg["params"] | |
+ and msg["params"]["encodedDataLength"] >= 0 | |
+ ): | |
+ bytesIn = msg["params"]["encodedDataLength"] | |
+ request["objectSize"] += bytesIn | |
+ request["bytesIn"] += bytesIn | |
+ request["transfer_size"] += bytesIn | |
+ elif "dataLength" in msg["params"] and msg["params"]["dataLength"] >= 0: | |
+ bytesIn = msg["params"]["dataLength"] | |
+ request["objectSize"] += bytesIn | |
+ request["bytesIn"] += bytesIn | |
+ request["transfer_size"] += bytesIn | |
+ if "dataLength" in msg["params"] and msg["params"]["dataLength"] >= 0: | |
+ request["objectSizeUncompressed"] += msg["params"]["dataLength"] | |
+ if timestamp and "start" in request and timestamp > request["start"]: | |
+ if "chunks" not in request: | |
+ request["chunks"] = [] | |
+ request["chunks"].append({"ts": timestamp, "bytes": bytesIn}) | |
+ if "firstByte" not in request or timestamp < request["firstByte"]: | |
+ request["firstByte"] = timestamp | |
+ if "end" not in request or timestamp > request["end"]: | |
+ request["end"] = timestamp | |
+ elif event == "loadingFinished": | |
+ if timestamp and "start" in request and timestamp > request["start"]: | |
+ if "firstByte" not in request or timestamp < request["firstByte"]: | |
+ request["firstByte"] = timestamp | |
+ if "end" not in request or timestamp > request["end"]: | |
+ request["end"] = timestamp | |
+ if "metrics" in msg["params"]: | |
+ metrics = msg["params"]["metrics"] | |
+ if "priority" in metrics: | |
+ request["priority"] = metrics["priority"] | |
+ if "protocol" in metrics: | |
+ request["protocol"] = metrics["protocol"] | |
+ if "remoteAddress" in metrics: | |
+ separator = metrics["remoteAddress"].rfind(":") | |
if separator >= 0: | |
- request['ip'] = metrics['remoteAddress'][:separator] | |
+ request["ip"] = metrics["remoteAddress"][:separator] | |
else: | |
- request['ip'] = metrics['remoteAddress'] | |
- if 'connectionIdentifier' in metrics: | |
- identifier = metrics['connectionIdentifier'] | |
+ request["ip"] = metrics["remoteAddress"] | |
+ if "connectionIdentifier" in metrics: | |
+ identifier = metrics["connectionIdentifier"] | |
if identifier in self.connections: | |
- request['connection'] = self.connections[identifier] | |
+ request["connection"] = self.connections[identifier] | |
else: | |
self.last_connection_id += 1 | |
self.connections[identifier] = self.last_connection_id | |
- request['connection'] = self.last_connection_id | |
- if 'requestHeaderBytesSent' in metrics: | |
- request['bytesOut'] = metrics['requestHeaderBytesSent'] | |
- if 'requestBodyBytesSent' in metrics: | |
- request['bytesOut'] += metrics['requestBodyBytesSent'] | |
- if 'responseBodyBytesReceived' in metrics: | |
- request['bytesIn'] = metrics['responseBodyBytesReceived'] | |
- request['objectSize'] = metrics['responseBodyBytesReceived'] | |
- request['transfer_size'] = metrics['responseBodyBytesReceived'] | |
- if 'responseHeaderBytesReceived' in metrics and \ | |
- metrics['responseHeaderBytesReceived'] >= 0: | |
- request['bytesIn'] += metrics['responseHeaderBytesReceived'] | |
- if 'responseBodyDecodedSize' in metrics and \ | |
- metrics['responseBodyDecodedSize'] >= 0: | |
- request['objectSizeUncompressed'] = \ | |
- metrics['responseBodyDecodedSize'] | |
- if request['fromNet']: | |
+ request["connection"] = self.last_connection_id | |
+ if "requestHeaderBytesSent" in metrics: | |
+ request["bytesOut"] = metrics["requestHeaderBytesSent"] | |
+ if "requestBodyBytesSent" in metrics: | |
+ request["bytesOut"] += metrics["requestBodyBytesSent"] | |
+ if "responseBodyBytesReceived" in metrics: | |
+ request["bytesIn"] = metrics["responseBodyBytesReceived"] | |
+ request["objectSize"] = metrics["responseBodyBytesReceived"] | |
+ request["transfer_size"] = metrics["responseBodyBytesReceived"] | |
+ if ( | |
+ "responseHeaderBytesReceived" in metrics | |
+ and metrics["responseHeaderBytesReceived"] >= 0 | |
+ ): | |
+ request["bytesIn"] += metrics["responseHeaderBytesReceived"] | |
+ if ( | |
+ "responseBodyDecodedSize" in metrics | |
+ and metrics["responseBodyDecodedSize"] >= 0 | |
+ ): | |
+ request["objectSizeUncompressed"] = metrics[ | |
+ "responseBodyDecodedSize" | |
+ ] | |
+ if request["fromNet"]: | |
self.get_response_body(request_id, original_request_id) | |
- elif event == 'loadingFailed': | |
- if timestamp and 'start' in request and timestamp > request['start']: | |
- if 'firstByte' not in request or timestamp < request['firstByte']: | |
- request['firstByte'] = timestamp | |
- if 'end' not in request or timestamp > request['end']: | |
- request['end'] = timestamp | |
- request['statusText'] = msg['params']['errorText'] | |
+ elif event == "loadingFailed": | |
+ if timestamp and "start" in request and timestamp > request["start"]: | |
+ if "firstByte" not in request or timestamp < request["firstByte"]: | |
+ request["firstByte"] = timestamp | |
+ if "end" not in request or timestamp > request["end"]: | |
+ request["end"] = timestamp | |
+ request["statusText"] = msg["params"]["errorText"] | |
if self.main_request is not None and request_id == self.main_request: | |
- if 'canceled' not in msg['params'] or not msg['params']['canceled']: | |
- self.task['error'] = msg['params']['errorText'] | |
- self.nav_error = msg['params']['errorText'] | |
+ if "canceled" not in msg["params"] or not msg["params"]["canceled"]: | |
+ self.task["error"] = msg["params"]["errorText"] | |
+ self.nav_error = msg["params"]["errorText"] | |
self.nav_error_code = 12999 | |
- logging.debug('Navigation error: %s', self.nav_error) | |
- elif event == 'requestServedFromMemoryCache': | |
- request['fromNet'] = False | |
+ logging.debug("Navigation error: %s", self.nav_error) | |
+ elif event == "requestServedFromMemoryCache": | |
+ request["fromNet"] = False | |
else: | |
ignore_activity = True | |
- if not self.task['stop_at_onload'] and not ignore_activity: | |
+ if not self.task["stop_at_onload"] and not ignore_activity: | |
self.last_activity = monotonic.monotonic() | |
def process_inspector_event(self, event): | |
"""Process Inspector.* dev tools events""" | |
- if event == 'detached': | |
- self.task['error'] = 'Inspector detached, possibly crashed.' | |
- elif event == 'targetCrashed': | |
- self.task['error'] = 'Browser crashed.' | |
+ if event == "detached": | |
+ self.task["error"] = "Inspector detached, possibly crashed." | |
+ elif event == "targetCrashed": | |
+ self.task["error"] = "Browser crashed." | |
def process_timeline_event(self, event, msg): | |
"""Handle Timeline.* events""" | |
- if self.trace_parser is not None and 'params' in msg and 'record' in msg['params']: | |
- if 'start' not in self.page: | |
+ if ( | |
+ self.trace_parser is not None | |
+ and "params" in msg | |
+ and "record" in msg["params"] | |
+ ): | |
+ if "start" not in self.page: | |
return | |
if self.trace_parser.start_time is None: | |
- self.trace_parser.start_time = self.page['start'] * 1000000.0 | |
- self.trace_parser.end_time = self.page['start'] * 1000000.0 | |
- if 'timestamp' in msg['params']: | |
- timestamp = msg['params']['timestamp'] * 1000000.0 | |
+ self.trace_parser.start_time = self.page["start"] * 1000000.0 | |
+ self.trace_parser.end_time = self.page["start"] * 1000000.0 | |
+ if "timestamp" in msg["params"]: | |
+ timestamp = msg["params"]["timestamp"] * 1000000.0 | |
if timestamp > self.trace_parser.end_time: | |
self.trace_parser.end_time = timestamp | |
- processed = self.trace_parser.ProcessOldTimelineEvent(msg['params']['record'], None) | |
+ processed = self.trace_parser.ProcessOldTimelineEvent( | |
+ msg["params"]["record"], None | |
+ ) | |
if processed is not None: | |
self.trace_parser.timeline_events.append(processed) | |
def process_console_event(self, event, msg): | |
"""Handle Console.* events""" | |
- if event == 'messageAdded' and 'message' in msg['params']: | |
- self.console_log.append(msg['params']['message']) | |
+ if event == "messageAdded" and "message" in msg["params"]: | |
+ self.console_log.append(msg["params"]["message"]) | |
def get_response_body(self, request_id, original_id): | |
"""Retrieve and store the given response body (if necessary)""" | |
if original_id not in self.response_bodies and self.body_fail_count < 3: | |
request = self.requests[request_id] | |
- if 'status' in request and request['status'] == 200 and 'response_headers' in request: | |
- logging.debug('Getting body for %s (%d) - %s', request_id, | |
- request['bytesIn'], request['url']) | |
- path = os.path.join(self.task['dir'], 'bodies') | |
+ if ( | |
+ "status" in request | |
+ and request["status"] == 200 | |
+ and "response_headers" in request | |
+ ): | |
+ logging.debug( | |
+ "Getting body for %s (%d) - %s", | |
+ request_id, | |
+ request["bytesIn"], | |
+ request["url"], | |
+ ) | |
+ path = os.path.join(self.task["dir"], "bodies") | |
if not os.path.isdir(path): | |
os.makedirs(path) | |
body_file_path = os.path.join(path, original_id) | |
if not os.path.exists(body_file_path): | |
# Only grab bodies needed for optimization checks | |
# or if we are saving full bodies | |
need_body = True | |
- content_type = self.get_header_value(request['response_headers'], | |
- 'Content-Type') | |
+ content_type = self.get_header_value( | |
+ request["response_headers"], "Content-Type" | |
+ ) | |
is_text = False | |
if content_type is not None: | |
content_type = content_type.lower() | |
- if content_type.startswith('text/') or \ | |
- content_type.find('javascript') >= 0 or \ | |
- content_type.find('json') >= 0: | |
+ if ( | |
+ content_type.startswith("text/") | |
+ or content_type.find("javascript") >= 0 | |
+ or content_type.find("json") >= 0 | |
+ ): | |
is_text = True | |
# Ignore video files over 10MB | |
- if content_type[:6] == 'video/' and request['bytesIn'] > 10000000: | |
+ if ( | |
+ content_type[:6] == "video/" | |
+ and request["bytesIn"] > 10000000 | |
+ ): | |
need_body = False | |
- optimization_checks_disabled = bool('noopt' in self.job and self.job['noopt']) | |
+ optimization_checks_disabled = bool( | |
+ "noopt" in self.job and self.job["noopt"] | |
+ ) | |
if optimization_checks_disabled and self.bodies_zip_file is None: | |
need_body = False | |
if need_body: | |
- response = self.send_command("Network.getResponseBody", | |
- {'requestId': original_id}, wait=True) | |
+ response = self.send_command( | |
+ "Network.getResponseBody", | |
+ {"requestId": original_id}, | |
+ wait=True, | |
+ ) | |
if response is None: | |
self.body_fail_count += 1 | |
- logging.warning('No response to body request for request %s', | |
- request_id) | |
- elif 'result' not in response or \ | |
- 'body' not in response['result']: | |
+ logging.warning( | |
+ "No response to body request for request %s", request_id | |
+ ) | |
+ elif ( | |
+ "result" not in response or "body" not in response["result"] | |
+ ): | |
self.body_fail_count = 0 | |
- logging.warning('Missing response body for request %s', | |
- request_id) | |
- elif len(response['result']['body']): | |
+ logging.warning( | |
+ "Missing response body for request %s", request_id | |
+ ) | |
+ elif len(response["result"]["body"]): | |
self.body_fail_count = 0 | |
# Write the raw body to a file (all bodies) | |
- if 'base64Encoded' in response['result'] and \ | |
- response['result']['base64Encoded']: | |
- body = base64.b64decode(response['result']['body']) | |
+ if ( | |
+ "base64Encoded" in response["result"] | |
+ and response["result"]["base64Encoded"] | |
+ ): | |
+ body = base64.b64decode(response["result"]["body"]) | |
else: | |
- body = response['result']['body'].encode('utf-8') | |
+ body = response["result"]["body"].encode("utf-8") | |
is_text = True | |
# Add text bodies to the zip archive | |
if self.bodies_zip_file is not None and is_text: | |
self.body_index += 1 | |
- name = '{0:03d}-{1}-body.txt'.format(self.body_index, request_id) | |
+ name = "{0:03d}-{1}-body.txt".format( | |
+ self.body_index, request_id | |
+ ) | |
self.bodies_zip_file.writestr(name, body) | |
- logging.debug('%s: Stored body in zip', request_id) | |
- logging.debug('%s: Body length: %d', request_id, len(body)) | |
+ logging.debug("%s: Stored body in zip", request_id) | |
+ logging.debug("%s: Body length: %d", request_id, len(body)) | |
self.response_bodies[request_id] = body | |
- with open(body_file_path, 'wb') as body_file: | |
+ with open(body_file_path, "wb") as body_file: | |
body_file.write(body) | |
else: | |
self.body_fail_count = 0 | |
- self.response_bodies[request_id] = response['result']['body'] | |
+ self.response_bodies[request_id] = response["result"][ | |
+ "body" | |
+ ] | |
if os.path.exists(body_file_path): | |
- request['body'] = body_file_path | |
+ request["body"] = body_file_path | |
def get_header_value(self, headers, name): | |
"""Get the value for the requested header""" | |
value = None | |
try: | |
@@ -682,33 +779,36 @@ | |
value = headers[name] | |
else: | |
find = name.lower() | |
for header_name in headers: | |
check = header_name.lower() | |
- if check == find or (check[0] == ':' and check[1:] == find): | |
+ if check == find or (check[0] == ":" and check[1:] == find): | |
value = headers[header_name] | |
break | |
except Exception: | |
pass | |
return value | |
def prepare_task(self, task): | |
"""Format the file prefixes for multi-step testing""" | |
- if task['current_step'] == 1: | |
- task['prefix'] = task['task_prefix'] | |
- task['video_subdirectory'] = task['task_video_prefix'] | |
+ if task["current_step"] == 1: | |
+ task["prefix"] = task["task_prefix"] | |
+ task["video_subdirectory"] = task["task_video_prefix"] | |
else: | |
- task['prefix'] = '{0}_{1:d}'.format(task['task_prefix'], task['current_step']) | |
- task['video_subdirectory'] = '{0}_{1:d}'.format(task['task_video_prefix'], | |
- task['current_step']) | |
- if task['video_subdirectory'] not in task['video_directories']: | |
- task['video_directories'].append(task['video_subdirectory']) | |
+ task["prefix"] = "{0}_{1:d}".format( | |
+ task["task_prefix"], task["current_step"] | |
+ ) | |
+ task["video_subdirectory"] = "{0}_{1:d}".format( | |
+ task["task_video_prefix"], task["current_step"] | |
+ ) | |
+ if task["video_subdirectory"] not in task["video_directories"]: | |
+ task["video_directories"].append(task["video_subdirectory"]) | |
if self.event_name is not None: | |
- task['step_name'] = self.event_name | |
+ task["step_name"] = self.event_name | |
else: | |
- task['step_name'] = 'Step_{0:d}'.format(task['current_step']) | |
- self.path_base = os.path.join(self.task['dir'], self.task['prefix']) | |
+ task["step_name"] = "Step_{0:d}".format(task["current_step"]) | |
+ self.path_base = os.path.join(self.task["dir"], self.task["prefix"]) | |
def on_start_recording(self, task): | |
"""Notification that we are about to start an operation that needs to be recorded""" | |
self.page = {} | |
self.requests = {} | |
@@ -716,233 +816,283 @@ | |
self.response_bodies = {} | |
if self.timeline is not None: | |
self.timeline.close() | |
self.timeline = None | |
self.wpt_result = None | |
- task['page_data'] = {'date': time.time()} | |
- task['page_result'] = None | |
- task['run_start_time'] = monotonic.monotonic() | |
+ task["page_data"] = {"date": time.time()} | |
+ task["page_result"] = None | |
+ task["run_start_time"] = monotonic.monotonic() | |
self.flush_messages() | |
- self.send_command('Page.enable', {}) | |
- self.send_command('Inspector.enable', {}) | |
- self.send_command('Network.enable', {}) | |
- self.send_command('Inspector.enable', {}) | |
+ self.send_command("Page.enable", {}) | |
+ self.send_command("Inspector.enable", {}) | |
+ self.send_command("Network.enable", {}) | |
+ self.send_command("Inspector.enable", {}) | |
if self.headers: | |
- self.send_command('Network.setExtraHTTPHeaders', | |
- {'headers': self.headers}, wait=True) | |
- if self.task['log_data']: | |
- if not self.job['shaper'].configure(self.job, task): | |
- self.task['error'] = "Error configuring traffic-shaping" | |
+ self.send_command( | |
+ "Network.setExtraHTTPHeaders", {"headers": self.headers}, wait=True | |
+ ) | |
+ if self.task["log_data"]: | |
+ if not self.job["shaper"].configure(self.job, task): | |
+ self.task["error"] = "Error configuring traffic-shaping" | |
if self.bodies_zip_file is not None: | |
self.bodies_zip_file.close() | |
self.bodies_zip_file = None | |
- if 'bodies' in self.job and self.job['bodies']: | |
- self.bodies_zip_file = zipfile.ZipFile(self.path_base + '_bodies.zip', 'w', | |
- zipfile.ZIP_DEFLATED) | |
- self.send_command('Console.enable', {}) | |
- if 'timeline' in self.job and self.job['timeline']: | |
+ if "bodies" in self.job and self.job["bodies"]: | |
+ self.bodies_zip_file = zipfile.ZipFile( | |
+ self.path_base + "_bodies.zip", "w", zipfile.ZIP_DEFLATED | |
+ ) | |
+ self.send_command("Console.enable", {}) | |
+ if "timeline" in self.job and self.job["timeline"]: | |
if self.path_base is not None: | |
- timeline_path = self.path_base + '_devtools.json.gz' | |
- self.timeline = gzip.open(timeline_path, 'wb', 7) | |
+ timeline_path = self.path_base + "_devtools.json.gz" | |
+ self.timeline = gzip.open(timeline_path, "wb", 7) | |
if self.timeline: | |
- self.timeline.write('[\n') | |
+ self.timeline.write("[\n") | |
from internal.support.trace_parser import Trace | |
+ | |
self.trace_parser = Trace() | |
- self.trace_parser.cpu['main_thread'] = '0' | |
- self.trace_parser.threads['0'] = {} | |
- self.send_command('Timeline.start', {}) | |
+ self.trace_parser.cpu["main_thread"] = "0" | |
+ self.trace_parser.threads["0"] = {} | |
+ self.send_command("Timeline.start", {}) | |
self.ios.show_orange() | |
- if self.path_base is not None and not self.job['disable_video']: | |
- task['video_file'] = self.path_base + '_video.mp4' | |
+ if self.path_base is not None and not self.job["disable_video"]: | |
+ task["video_file"] = self.path_base + "_video.mp4" | |
self.ios.start_video() | |
if self.ios_version: | |
- task['page_data']['osVersion'] = self.ios_version | |
- task['page_data']['os_version'] = self.ios_version | |
- task['page_data']['browserVersion'] = self.ios_version | |
- task['page_data']['browser_version'] = self.ios_version | |
+ task["page_data"]["osVersion"] = self.ios_version | |
+ task["page_data"]["os_version"] = self.ios_version | |
+ task["page_data"]["browserVersion"] = self.ios_version | |
+ task["page_data"]["browser_version"] = self.ios_version | |
self.recording = True | |
now = monotonic.monotonic() | |
- if not self.task['stop_at_onload']: | |
+ if not self.task["stop_at_onload"]: | |
self.last_activity = now | |
if self.page_loaded is not None: | |
self.page_loaded = now | |
- logging.debug('Starting measurement') | |
- task['start_time'] = datetime.utcnow() | |
+ logging.debug("Starting measurement") | |
+ task["start_time"] = datetime.utcnow() | |
def on_stop_capture(self, task): | |
"""Do any quick work to stop things that are capturing data""" | |
- if 'heroElementTimes' in self.job and self.job['heroElementTimes']: | |
+ if "heroElementTimes" in self.job and self.job["heroElementTimes"]: | |
hero_elements = None | |
custom_hero_selectors = {} | |
- if 'heroElements' in self.job: | |
- custom_hero_selectors = self.job['heroElements'] | |
- logging.debug('Collecting hero element positions') | |
- with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: | |
+ if "heroElements" in self.job: | |
+ custom_hero_selectors = self.job["heroElements"] | |
+ logging.debug("Collecting hero element positions") | |
+ with open( | |
+ os.path.join(self.script_dir, "hero_elements.js"), "rb" | |
+ ) as script_file: | |
hero_elements_script = script_file.read() | |
- script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' | |
+ script = ( | |
+ hero_elements_script + "(" + json.dumps(custom_hero_selectors) + ")" | |
+ ) | |
hero_elements = self.ios.execute_js(script) | |
if hero_elements is not None: | |
- path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = os.path.join( | |
+ task["dir"], task["prefix"] + "_hero_elements.json.gz" | |
+ ) | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json.dumps(hero_elements)) | |
def on_stop_recording(self, task): | |
"""Notification that we are done with recording""" | |
self.recording = False | |
- self.send_command('Page.disable', {}) | |
- self.send_command('Inspector.disable', {}) | |
- self.send_command('Network.disable', {}) | |
- self.send_command('Inspector.disable', {}) | |
- if self.task['log_data']: | |
- self.send_command('Console.disable', {}) | |
- if 'timeline' in self.job and self.job['timeline']: | |
- self.send_command('Timeline.stop', {}) | |
- if self.job['pngScreenShot'] and self.path_base is not None: | |
- screen_shot = self.path_base + '_screen.png' | |
+ self.send_command("Page.disable", {}) | |
+ self.send_command("Inspector.disable", {}) | |
+ self.send_command("Network.disable", {}) | |
+ self.send_command("Inspector.disable", {}) | |
+ if self.task["log_data"]: | |
+ self.send_command("Console.disable", {}) | |
+ if "timeline" in self.job and self.job["timeline"]: | |
+ self.send_command("Timeline.stop", {}) | |
+ if self.job["pngScreenShot"] and self.path_base is not None: | |
+ screen_shot = self.path_base + "_screen.png" | |
self.grab_screenshot(screen_shot, png=True) | |
elif self.path_base is not None: | |
- screen_shot = self.path_base + '_screen.jpg' | |
+ screen_shot = self.path_base + "_screen.jpg" | |
self.grab_screenshot(screen_shot, png=False, resize=600) | |
# Grab the video and kick off processing async | |
- if 'video_file' in task: | |
+ if "video_file" in task: | |
self.ios.stop_video() | |
# Collect end of test data from the browser | |
self.collect_browser_metrics(task) | |
if self.bodies_zip_file is not None: | |
self.bodies_zip_file.close() | |
self.bodies_zip_file = None | |
- self.job['shaper'].reset() | |
+ self.job["shaper"].reset() | |
def on_start_processing(self, task): | |
"""Start any processing of the captured data""" | |
- if task['log_data']: | |
+ if task["log_data"]: | |
# Attach response bodies to all of the appropriate requests | |
requests = {} | |
for request_id in self.requests: | |
request = self.requests[request_id] | |
- if request['fromNet'] and 'url' in request and request['url'].startswith('http'): | |
- if not request['is_redirect'] and \ | |
- request['original_id'] in self.response_bodies: | |
- request['response_body'] = self.response_bodies[request['original_id']] | |
+ if ( | |
+ request["fromNet"] | |
+ and "url" in request | |
+ and request["url"].startswith("http") | |
+ ): | |
+ if ( | |
+ not request["is_redirect"] | |
+ and request["original_id"] in self.response_bodies | |
+ ): | |
+ request["response_body"] = self.response_bodies[ | |
+ request["original_id"] | |
+ ] | |
requests[request_id] = request | |
# Start the optimization checks in a background thread | |
self.optimization = OptimizationChecks(self.job, task, requests) | |
self.optimization.start() | |
- support_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "support") | |
+ support_path = os.path.join( | |
+ os.path.abspath(os.path.dirname(__file__)), "support" | |
+ ) | |
# Start processing the timeline | |
if self.timeline: | |
self.timeline.write("{}]") | |
self.timeline.close() | |
self.timeline = None | |
# Grab the video and kick off processing async | |
- if 'video_file' in task and self.ios.get_video(task['video_file']): | |
- video_path = os.path.join(task['dir'], task['video_subdirectory']) | |
- if task['current_step'] == 1: | |
- filename = '{0:d}.{1:d}.histograms.json.gz'.format(task['run'], task['cached']) | |
+ if "video_file" in task and self.ios.get_video(task["video_file"]): | |
+ video_path = os.path.join(task["dir"], task["video_subdirectory"]) | |
+ if task["current_step"] == 1: | |
+ filename = "{0:d}.{1:d}.histograms.json.gz".format( | |
+ task["run"], task["cached"] | |
+ ) | |
else: | |
- filename = '{0:d}.{1:d}.{2:d}.histograms.json.gz'.format(task['run'], | |
- task['cached'], | |
- task['current_step']) | |
- histograms = os.path.join(task['dir'], filename) | |
- progress_file = os.path.join(task['dir'], task['prefix']) + \ | |
- '_visual_progress.json.gz' | |
+ filename = "{0:d}.{1:d}.{2:d}.histograms.json.gz".format( | |
+ task["run"], task["cached"], task["current_step"] | |
+ ) | |
+ histograms = os.path.join(task["dir"], filename) | |
+ progress_file = ( | |
+ os.path.join(task["dir"], task["prefix"]) | |
+ + "_visual_progress.json.gz" | |
+ ) | |
visualmetrics = os.path.join(support_path, "visualmetrics.py") | |
- args = ['python', visualmetrics, '-i', task['video_file'], | |
- '-d', video_path, '--force', '--quality', | |
- '{0:d}'.format(self.job['imageQuality']), | |
- '--viewport', '--orange', '--maxframes', '50', '--histogram', histograms, | |
- '--progress', progress_file] | |
- if 'debug' in self.job and self.job['debug']: | |
- args.append('-vvvv') | |
- if 'heroElementTimes' in self.job and self.job['heroElementTimes']: | |
- hero_elements_file = os.path.join(task['dir'], task['prefix']) + '_hero_elements.json.gz' | |
- args.extend(['--herodata', hero_elements_file]) | |
- if 'renderVideo' in self.job and self.job['renderVideo']: | |
- video_out = self.path_base + '_rendered_video.mp4' | |
- args.extend(['--render', video_out]) | |
- if 'fullSizeVideo' in self.job and self.job['fullSizeVideo']: | |
- args.append('--full') | |
- if 'thumbsize' in self.job: | |
+ args = [ | |
+ "python", | |
+ visualmetrics, | |
+ "-i", | |
+ task["video_file"], | |
+ "-d", | |
+ video_path, | |
+ "--force", | |
+ "--quality", | |
+ "{0:d}".format(self.job["imageQuality"]), | |
+ "--viewport", | |
+ "--orange", | |
+ "--maxframes", | |
+ "50", | |
+ "--histogram", | |
+ histograms, | |
+ "--progress", | |
+ progress_file, | |
+ ] | |
+ if "debug" in self.job and self.job["debug"]: | |
+ args.append("-vvvv") | |
+ if "heroElementTimes" in self.job and self.job["heroElementTimes"]: | |
+ hero_elements_file = ( | |
+ os.path.join(task["dir"], task["prefix"]) | |
+ + "_hero_elements.json.gz" | |
+ ) | |
+ args.extend(["--herodata", hero_elements_file]) | |
+ if "renderVideo" in self.job and self.job["renderVideo"]: | |
+ video_out = self.path_base + "_rendered_video.mp4" | |
+ args.extend(["--render", video_out]) | |
+ if "fullSizeVideo" in self.job and self.job["fullSizeVideo"]: | |
+ args.append("--full") | |
+ if "thumbsize" in self.job: | |
try: | |
- thumbsize = int(self.job['thumbsize']) | |
+ thumbsize = int(self.job["thumbsize"]) | |
if thumbsize > 0 and thumbsize <= 2000: | |
- args.extend(['--thumbsize', str(thumbsize)]) | |
+ args.extend(["--thumbsize", str(thumbsize)]) | |
except Exception: | |
pass | |
- logging.debug(' '.join(args)) | |
+ logging.debug(" ".join(args)) | |
self.video_processing = subprocess.Popen(args) | |
# Save the console logs | |
if self.console_log and self.path_base is not None: | |
- log_file = self.path_base + '_console_log.json.gz' | |
- with gzip.open(log_file, 'wb', 7) as f_out: | |
+ log_file = self.path_base + "_console_log.json.gz" | |
+ with gzip.open(log_file, "wb", 7) as f_out: | |
json.dump(self.console_log, f_out) | |
# Process the timeline data | |
if self.trace_parser is not None and self.path_base is not None: | |
start = monotonic.monotonic() | |
logging.debug("Processing the trace timeline events") | |
self.trace_parser.ProcessTimelineEvents() | |
- self.trace_parser.WriteCPUSlices(self.path_base + '_timeline_cpu.json.gz') | |
- self.trace_parser.WriteScriptTimings(self.path_base + '_script_timing.json.gz') | |
- self.trace_parser.WriteInteractive(self.path_base + '_interactive.json.gz') | |
+ self.trace_parser.WriteCPUSlices( | |
+ self.path_base + "_timeline_cpu.json.gz" | |
+ ) | |
+ self.trace_parser.WriteScriptTimings( | |
+ self.path_base + "_script_timing.json.gz" | |
+ ) | |
+ self.trace_parser.WriteInteractive( | |
+ self.path_base + "_interactive.json.gz" | |
+ ) | |
elapsed = monotonic.monotonic() - start | |
logging.debug("Done processing the trace events: %0.3fs", elapsed) | |
self.trace_parser = None | |
# Calculate the request and page stats | |
self.wpt_result = {} | |
- self.wpt_result['requests'] = self.process_requests(requests) | |
- self.wpt_result['pageData'] = self.calculate_page_stats(self.wpt_result['requests']) | |
+ self.wpt_result["requests"] = self.process_requests(requests) | |
+ self.wpt_result["pageData"] = self.calculate_page_stats( | |
+ self.wpt_result["requests"] | |
+ ) | |
def wait_for_processing(self, task): | |
"""Wait for any background processing threads to finish""" | |
if self.video_processing is not None: | |
- logging.debug('Waiting for video processing to finish') | |
+ logging.debug("Waiting for video processing to finish") | |
self.video_processing.communicate() | |
self.video_processing = None | |
- if not self.job['keepvideo']: | |
+ if not self.job["keepvideo"]: | |
try: | |
- os.remove(task['video_file']) | |
+ os.remove(task["video_file"]) | |
except Exception: | |
pass | |
opt = None | |
if self.optimization is not None: | |
opt = self.optimization.join() | |
if self.wpt_result is not None: | |
- self.process_optimization_results(self.wpt_result['pageData'], | |
- self.wpt_result['requests'], opt) | |
+ self.process_optimization_results( | |
+ self.wpt_result["pageData"], self.wpt_result["requests"], opt | |
+ ) | |
if self.path_base is not None: | |
- devtools_file = self.path_base + '_devtools_requests.json.gz' | |
- with gzip.open(devtools_file, 'wb', 7) as f_out: | |
+ devtools_file = self.path_base + "_devtools_requests.json.gz" | |
+ with gzip.open(devtools_file, "wb", 7) as f_out: | |
json.dump(self.wpt_result, f_out) | |
def step_complete(self, task): | |
"""Final step processing""" | |
logging.debug("Writing end-of-step data") | |
# Write out the accumulated page_data | |
- if task['log_data'] and task['page_data']: | |
- if 'browser' in self.job: | |
- task['page_data']['browser_name'] = self.job['browser'] | |
- if 'step_name' in task: | |
- task['page_data']['eventName'] = task['step_name'] | |
- if 'run_start_time' in task: | |
- task['page_data']['test_run_time_ms'] = \ | |
- int(round((monotonic.monotonic() - task['run_start_time']) * 1000.0)) | |
+ if task["log_data"] and task["page_data"]: | |
+ if "browser" in self.job: | |
+ task["page_data"]["browser_name"] = self.job["browser"] | |
+ if "step_name" in task: | |
+ task["page_data"]["eventName"] = task["step_name"] | |
+ if "run_start_time" in task: | |
+ task["page_data"]["test_run_time_ms"] = int( | |
+ round((monotonic.monotonic() - task["run_start_time"]) * 1000.0) | |
+ ) | |
if self.path_base is not None: | |
- path = self.path_base + '_page_data.json.gz' | |
- json_page_data = json.dumps(task['page_data']) | |
- logging.debug('Page Data: %s', json_page_data) | |
- with gzip.open(path, 'wb', 7) as outfile: | |
+ path = self.path_base + "_page_data.json.gz" | |
+ json_page_data = json.dumps(task["page_data"]) | |
+ logging.debug("Page Data: %s", json_page_data) | |
+ with gzip.open(path, "wb", 7) as outfile: | |
outfile.write(json_page_data) | |
def send_command(self, method, params, wait=False, timeout=10): | |
"""Send a raw dev tools message and optionally wait for the response""" | |
ret = None | |
if self.websocket: | |
self.command_id += 1 | |
command_id = int(self.command_id) | |
if wait: | |
self.pending_commands.append(command_id) | |
- msg = {'id': command_id, 'method': method, 'params': params} | |
+ msg = {"id": command_id, "method": method, "params": params} | |
try: | |
out = json.dumps(msg) | |
logging.debug("Sending: %s", out) | |
self.websocket.send(out) | |
if wait: | |
@@ -959,63 +1109,67 @@ | |
pass | |
except Exception as err: | |
logging.debug("Websocket send error: %s", err.__str__()) | |
return ret | |
- | |
def process_command(self, command): | |
"""Process an individual script command""" | |
logging.debug("Processing script command:") | |
logging.debug(command) | |
- if command['command'] == 'navigate': | |
- self.task['page_data']['URL'] = command['target'] | |
+ if command["command"] == "navigate": | |
+ self.task["page_data"]["URL"] = command["target"] | |
self.main_frame = None | |
self.main_request = None | |
self.is_navigating = True | |
- self.ios.navigate(command['target']) | |
- elif command['command'] == 'logdata': | |
- self.task['combine_steps'] = False | |
- if int(re.search(r'\d+', str(command['target'])).group()): | |
+ self.ios.navigate(command["target"]) | |
+ elif command["command"] == "logdata": | |
+ self.task["combine_steps"] = False | |
+ if int(re.search(r"\d+", str(command["target"])).group()): | |
logging.debug("Data logging enabled") | |
- self.task['log_data'] = True | |
+ self.task["log_data"] = True | |
else: | |
logging.debug("Data logging disabled") | |
- self.task['log_data'] = False | |
- elif command['command'] == 'combinesteps': | |
- self.task['log_data'] = True | |
- self.task['combine_steps'] = True | |
- elif command['command'] == 'seteventname': | |
- self.event_name = command['target'] | |
- elif command['command'] == 'exec': | |
- if command['record']: | |
+ self.task["log_data"] = False | |
+ elif command["command"] == "combinesteps": | |
+ self.task["log_data"] = True | |
+ self.task["combine_steps"] = True | |
+ elif command["command"] == "seteventname": | |
+ self.event_name = command["target"] | |
+ elif command["command"] == "exec": | |
+ if command["record"]: | |
self.main_frame = None | |
self.main_request = None | |
self.is_navigating = True | |
- self.ios.execute_js(command['target'], remove_orange=self.recording) | |
- elif command['command'] == 'sleep': | |
- delay = min(60, max(0, int(re.search(r'\d+', str(command['target'])).group()))) | |
+ self.ios.execute_js(command["target"], remove_orange=self.recording) | |
+ elif command["command"] == "sleep": | |
+ delay = min( | |
+ 60, max(0, int(re.search(r"\d+", str(command["target"])).group())) | |
+ ) | |
if delay > 0: | |
time.sleep(delay) | |
- elif command['command'] == 'setabm': | |
- self.task['stop_at_onload'] = \ | |
- bool('target' in command and int(re.search(r'\d+', | |
- str(command['target'])).group()) == 0) | |
- elif command['command'] == 'setactivitytimeout': | |
- if 'target' in command: | |
- milliseconds = int(re.search(r'\d+', str(command['target'])).group()) | |
- self.task['activity_time'] = max(0, min(30, float(milliseconds) / 1000.0)) | |
- elif command['command'] == 'setuseragent': | |
- self.task['user_agent_string'] = command['target'] | |
- elif command['command'] == 'setcookie': | |
+ elif command["command"] == "setabm": | |
+ self.task["stop_at_onload"] = bool( | |
+ "target" in command | |
+ and int(re.search(r"\d+", str(command["target"])).group()) == 0 | |
+ ) | |
+ elif command["command"] == "setactivitytimeout": | |
+ if "target" in command: | |
+ milliseconds = int(re.search(r"\d+", str(command["target"])).group()) | |
+ self.task["activity_time"] = max( | |
+ 0, min(30, float(milliseconds) / 1000.0) | |
+ ) | |
+ elif command["command"] == "setuseragent": | |
+ self.task["user_agent_string"] = command["target"] | |
+ elif command["command"] == "setcookie": | |
pass | |
- elif command['command'] == 'clearcache': | |
+ elif command["command"] == "clearcache": | |
self.ios.clear_cache() | |
- elif command['command'] == 'addheader': | |
- self.set_header(command['target']) | |
- elif command['command'] == 'setheader': | |
- self.set_header(command['target']) | |
- elif command['command'] == 'resetheaders': | |
+ elif command["command"] == "addheader": | |
+ self.set_header(command["target"]) | |
+ elif command["command"] == "setheader": | |
+ self.set_header(command["target"]) | |
+ elif command["command"] == "resetheaders": | |
self.reset_headers() | |
def navigate(self, url): | |
"""Navigate to the given URL""" | |
if self.connected: | |
@@ -1024,27 +1178,36 @@ | |
def grab_screenshot(self, path, png=True, resize=0): | |
"""Save the screen shot (png or jpeg)""" | |
if self.connected: | |
data = self.ios.screenshot() | |
if data: | |
- resize_string = '' if not resize else '-resize {0:d}x{0:d} '.format(resize) | |
+ resize_string = ( | |
+ "" if not resize else "-resize {0:d}x{0:d} ".format(resize) | |
+ ) | |
if png: | |
- with open(path, 'wb') as image_file: | |
+ with open(path, "wb") as image_file: | |
image_file.write(data) | |
if resize_string: | |
- cmd = '{0} -format png -define png:color-type=2 '\ | |
- '-depth 8 {1}"{2}"'.format(self.job['image_magick']['mogrify'], | |
- resize_string, path) | |
+ cmd = ( | |
+ "{0} -format png -define png:color-type=2 " | |
+ '-depth 8 {1}"{2}"'.format( | |
+ self.job["image_magick"]["mogrify"], resize_string, path | |
+ ) | |
+ ) | |
logging.debug(cmd) | |
subprocess.call(cmd, shell=True) | |
else: | |
- tmp_file = path + '.png' | |
- with open(tmp_file, 'wb') as image_file: | |
+ tmp_file = path + ".png" | |
+ with open(tmp_file, "wb") as image_file: | |
image_file.write(data) | |
command = '{0} "{1}" {2}-quality {3:d} "{4}"'.format( | |
- self.job['image_magick']['convert'], | |
- tmp_file, resize_string, self.job['imageQuality'], path) | |
+ self.job["image_magick"]["convert"], | |
+ tmp_file, | |
+ resize_string, | |
+ self.job["imageQuality"], | |
+ path, | |
+ ) | |
logging.debug(command) | |
subprocess.call(command, shell=True) | |
if os.path.isfile(tmp_file): | |
try: | |
os.remove(tmp_file) | |
@@ -1052,359 +1215,418 @@ | |
pass | |
def get_empty_request(self, request_id, url): | |
"""Return and empty, initialized request""" | |
parts = urlparse.urlsplit(url) | |
- request = {'type': 3, | |
- 'id': request_id, | |
- 'request_id': request_id, | |
- 'ip_addr': '', | |
- 'full_url': url, | |
- 'is_secure': 1 if parts.scheme == 'https' else 0, | |
- 'method': '', | |
- 'host': parts.netloc, | |
- 'url': parts.path, | |
- 'responseCode': -1, | |
- 'load_start': -1, | |
- 'load_ms': -1, | |
- 'ttfb_ms': -1, | |
- 'dns_start': -1, | |
- 'dns_end': -1, | |
- 'dns_ms': -1, | |
- 'connect_start': -1, | |
- 'connect_end': -1, | |
- 'connect_ms': -1, | |
- 'ssl_start': -1, | |
- 'ssl_end': -1, | |
- 'ssl_ms': -1, | |
- 'bytesIn': 0, | |
- 'bytesOut': 0, | |
- 'objectSize': 0, | |
- 'initiator': '', | |
- 'initiator_line': '', | |
- 'initiator_column': '', | |
- 'server_rtt': None, | |
- 'headers': {'request': [], 'response': []}, | |
- 'score_cache': -1, | |
- 'score_cdn': -1, | |
- 'score_gzip': -1, | |
- 'score_cookies': -1, | |
- 'score_keep-alive': -1, | |
- 'score_minify': -1, | |
- 'score_combine': -1, | |
- 'score_compress': -1, | |
- 'score_etags': -1, | |
- 'gzip_total': None, | |
- 'gzip_save': None, | |
- 'minify_total': None, | |
- 'minify_save': None, | |
- 'image_total': None, | |
- 'image_save': None, | |
- 'cache_time': None, | |
- 'cdn_provider': None, | |
- 'server_count': None, | |
- 'socket': -1 | |
- } | |
+ request = { | |
+ "type": 3, | |
+ "id": request_id, | |
+ "request_id": request_id, | |
+ "ip_addr": "", | |
+ "full_url": url, | |
+ "is_secure": 1 if parts.scheme == "https" else 0, | |
+ "method": "", | |
+ "host": parts.netloc, | |
+ "url": parts.path, | |
+ "responseCode": -1, | |
+ "load_start": -1, | |
+ "load_ms": -1, | |
+ "ttfb_ms": -1, | |
+ "dns_start": -1, | |
+ "dns_end": -1, | |
+ "dns_ms": -1, | |
+ "connect_start": -1, | |
+ "connect_end": -1, | |
+ "connect_ms": -1, | |
+ "ssl_start": -1, | |
+ "ssl_end": -1, | |
+ "ssl_ms": -1, | |
+ "bytesIn": 0, | |
+ "bytesOut": 0, | |
+ "objectSize": 0, | |
+ "initiator": "", | |
+ "initiator_line": "", | |
+ "initiator_column": "", | |
+ "server_rtt": None, | |
+ "headers": {"request": [], "response": []}, | |
+ "score_cache": -1, | |
+ "score_cdn": -1, | |
+ "score_gzip": -1, | |
+ "score_cookies": -1, | |
+ "score_keep-alive": -1, | |
+ "score_minify": -1, | |
+ "score_combine": -1, | |
+ "score_compress": -1, | |
+ "score_etags": -1, | |
+ "gzip_total": None, | |
+ "gzip_save": None, | |
+ "minify_total": None, | |
+ "minify_save": None, | |
+ "image_total": None, | |
+ "image_save": None, | |
+ "cache_time": None, | |
+ "cdn_provider": None, | |
+ "server_count": None, | |
+ "socket": -1, | |
+ } | |
if parts.query: | |
- request['url'] += '?' + parts.query | |
+ request["url"] += "?" + parts.query | |
return request | |
def process_requests(self, raw_requests): | |
"""Convert all of the request events into the format needed for WPT""" | |
requests = [] | |
- if 'start' in self.page: | |
- start = self.page['start'] | |
+ if "start" in self.page: | |
+ start = self.page["start"] | |
for request_id in raw_requests: | |
r = raw_requests[request_id] | |
- request = self.get_empty_request(request_id, r['url']) | |
- if 'ip' in r: | |
- request['ip_addr'] = r['ip'] | |
- if 'connection' in r: | |
- request['socket'] = r['connection'] | |
- if 'priority' in r: | |
- request['priority'] = r['priority'] | |
- if 'protocol' in r: | |
- request['protocol'] = r['protocol'] | |
- if 'method' in r: | |
- request['method'] = r['method'] | |
- if 'status' in r: | |
- request['responseCode'] = r['status'] | |
- if 'type' in r: | |
- request['requestType'] = r['type'] | |
- if 'created' in r: | |
- request['created'] = int(round((r['created'] - start) * 1000.0)) | |
- request['load_start'] = int(round((r['start'] - start) * 1000.0)) | |
- if 'end' in r: | |
- request['load_ms'] = int(round((r['end'] - r['start']) * 1000.0)) | |
- if 'firstByte' in r: | |
- request['ttfb_ms'] = int(round((r['firstByte'] - r['start']) * 1000.0)) | |
- if 'timing' in r and not r['is_redirect']: | |
- start_ms = int(request['load_start']) | |
- timing = r['timing'] | |
- if timing['domainLookupStart'] > 0 or timing['domainLookupEnd'] > 0: | |
- request['dns_start'] = int(round(start_ms + timing['domainLookupStart'])) | |
- request['dns_end'] = int(round(start_ms + timing['domainLookupEnd'])) | |
- if timing['connectStart'] > 0 or timing['connectEnd'] > 0: | |
- request['connect_start'] = int(round(start_ms + timing['connectStart'])) | |
- request['connect_end'] = int(round(start_ms + timing['connectEnd'])) | |
- if timing['secureConnectionStart'] >= 0: | |
- request['ssl_start'] = int(round(start_ms + | |
- timing['secureConnectionStart'])) | |
- request['ssl_end'] = request['connect_end'] | |
- request['connect_end'] = request['ssl_start'] | |
- if timing['requestStart'] >= 0: | |
- request['load_start'] = int(round(start_ms + timing['requestStart'])) | |
- request['load_ms'] -= int(round(timing['requestStart'])) | |
- request['ttfb_ms'] -= int(round(timing['requestStart'])) | |
- if timing['responseStart'] >= 0: | |
- request['ttfb_ms'] = int(round(timing['responseStart'] - | |
- timing['requestStart'])) | |
- if 'chunks' in r: | |
- request['chunks'] = [] | |
- for chunk in r['chunks']: | |
- ts = (chunk['ts'] - start) * 1000.0 | |
- request['chunks'].append({'ts': ts, 'bytes': chunk['bytes']}) | |
- request['bytesIn'] = r['bytesIn'] | |
- if 'bytesOut' in r: | |
- request['bytesOut'] = r['bytesOut'] | |
- if 'objectSize' in r: | |
- request['objectSize'] = r['objectSize'] | |
- if 'objectSizeUncompressed' in r: | |
- request['objectSizeUncompressed'] = r['objectSizeUncompressed'] | |
- if 'initiator' in r: | |
- if 'url' in r['initiator']: | |
- request['initiator'] = r['initiator']['url'] | |
- if 'lineNumber' in r['initiator']: | |
- request['initiator_line'] = r['initiator']['lineNumber'] | |
- elif 'stackTrace' in r['initiator'] and r['initiator']['stackTrace']: | |
- for entry in r['initiator']['stackTrace']: | |
- if 'url' in entry and entry['url'].startswith('http'): | |
- request['initiator'] = entry['url'] | |
- if 'lineNumber' in entry: | |
- request['initiator_line'] = entry['lineNumber'] | |
- if 'columnNumber' in entry: | |
- request['initiator_column'] = entry['columnNumber'] | |
+ request = self.get_empty_request(request_id, r["url"]) | |
+ if "ip" in r: | |
+ request["ip_addr"] = r["ip"] | |
+ if "connection" in r: | |
+ request["socket"] = r["connection"] | |
+ if "priority" in r: | |
+ request["priority"] = r["priority"] | |
+ if "protocol" in r: | |
+ request["protocol"] = r["protocol"] | |
+ if "method" in r: | |
+ request["method"] = r["method"] | |
+ if "status" in r: | |
+ request["responseCode"] = r["status"] | |
+ if "type" in r: | |
+ request["requestType"] = r["type"] | |
+ if "created" in r: | |
+ request["created"] = int(round((r["created"] - start) * 1000.0)) | |
+ request["load_start"] = int(round((r["start"] - start) * 1000.0)) | |
+ if "end" in r: | |
+ request["load_ms"] = int(round((r["end"] - r["start"]) * 1000.0)) | |
+ if "firstByte" in r: | |
+ request["ttfb_ms"] = int( | |
+ round((r["firstByte"] - r["start"]) * 1000.0) | |
+ ) | |
+ if "timing" in r and not r["is_redirect"]: | |
+ start_ms = int(request["load_start"]) | |
+ timing = r["timing"] | |
+ if timing["domainLookupStart"] > 0 or timing["domainLookupEnd"] > 0: | |
+ request["dns_start"] = int( | |
+ round(start_ms + timing["domainLookupStart"]) | |
+ ) | |
+ request["dns_end"] = int( | |
+ round(start_ms + timing["domainLookupEnd"]) | |
+ ) | |
+ if timing["connectStart"] > 0 or timing["connectEnd"] > 0: | |
+ request["connect_start"] = int( | |
+ round(start_ms + timing["connectStart"]) | |
+ ) | |
+ request["connect_end"] = int( | |
+ round(start_ms + timing["connectEnd"]) | |
+ ) | |
+ if timing["secureConnectionStart"] >= 0: | |
+ request["ssl_start"] = int( | |
+ round(start_ms + timing["secureConnectionStart"]) | |
+ ) | |
+ request["ssl_end"] = request["connect_end"] | |
+ request["connect_end"] = request["ssl_start"] | |
+ if timing["requestStart"] >= 0: | |
+ request["load_start"] = int( | |
+ round(start_ms + timing["requestStart"]) | |
+ ) | |
+ request["load_ms"] -= int(round(timing["requestStart"])) | |
+ request["ttfb_ms"] -= int(round(timing["requestStart"])) | |
+ if timing["responseStart"] >= 0: | |
+ request["ttfb_ms"] = int( | |
+ round(timing["responseStart"] - timing["requestStart"]) | |
+ ) | |
+ if "chunks" in r: | |
+ request["chunks"] = [] | |
+ for chunk in r["chunks"]: | |
+ ts = (chunk["ts"] - start) * 1000.0 | |
+ request["chunks"].append({"ts": ts, "bytes": chunk["bytes"]}) | |
+ request["bytesIn"] = r["bytesIn"] | |
+ if "bytesOut" in r: | |
+ request["bytesOut"] = r["bytesOut"] | |
+ if "objectSize" in r: | |
+ request["objectSize"] = r["objectSize"] | |
+ if "objectSizeUncompressed" in r: | |
+ request["objectSizeUncompressed"] = r["objectSizeUncompressed"] | |
+ if "initiator" in r: | |
+ if "url" in r["initiator"]: | |
+ request["initiator"] = r["initiator"]["url"] | |
+ if "lineNumber" in r["initiator"]: | |
+ request["initiator_line"] = r["initiator"]["lineNumber"] | |
+ elif ( | |
+ "stackTrace" in r["initiator"] and r["initiator"]["stackTrace"] | |
+ ): | |
+ for entry in r["initiator"]["stackTrace"]: | |
+ if "url" in entry and entry["url"].startswith("http"): | |
+ request["initiator"] = entry["url"] | |
+ if "lineNumber" in entry: | |
+ request["initiator_line"] = entry["lineNumber"] | |
+ if "columnNumber" in entry: | |
+ request["initiator_column"] = entry[ | |
+ "columnNumber" | |
+ ] | |
break | |
- if 'request_headers' in r: | |
- for name in r['request_headers']: | |
- for value in r['request_headers'][name].splitlines(): | |
- request['headers']['request'].append(u'{0}: {1}'.format(name, value)) | |
- if 'response_headers' in r: | |
- for name in r['response_headers']: | |
- for value in r['response_headers'][name].splitlines(): | |
- request['headers']['response'].append(u'{0}: {1}'.format(name, value)) | |
- value = self.get_header_value(r['response_headers'], 'Expires') | |
+ if "request_headers" in r: | |
+ for name in r["request_headers"]: | |
+ for value in r["request_headers"][name].splitlines(): | |
+ request["headers"]["request"].append( | |
+ u"{0}: {1}".format(name, value) | |
+ ) | |
+ if "response_headers" in r: | |
+ for name in r["response_headers"]: | |
+ for value in r["response_headers"][name].splitlines(): | |
+ request["headers"]["response"].append( | |
+ u"{0}: {1}".format(name, value) | |
+ ) | |
+ value = self.get_header_value(r["response_headers"], "Expires") | |
if value: | |
- request['expires'] = value | |
- value = self.get_header_value(r['response_headers'], 'Cache-Control') | |
+ request["expires"] = value | |
+ value = self.get_header_value( | |
+ r["response_headers"], "Cache-Control" | |
+ ) | |
if value: | |
- request['cacheControl'] = value | |
- value = self.get_header_value(r['response_headers'], 'Content-Type') | |
+ request["cacheControl"] = value | |
+ value = self.get_header_value(r["response_headers"], "Content-Type") | |
if value: | |
- request['contentType'] = value | |
- value = self.get_header_value(r['response_headers'], 'Content-Encoding') | |
+ request["contentType"] = value | |
+ value = self.get_header_value( | |
+ r["response_headers"], "Content-Encoding" | |
+ ) | |
if value: | |
- request['contentEncoding'] = value | |
+ request["contentEncoding"] = value | |
# If a content-length header is available, use that instead of the values | |
# reported by Safari which only show the unencoded size (even though it | |
# claims otherwise). | |
try: | |
- value = self.get_header_value(r['response_headers'], 'Content-Length') | |
+ value = self.get_header_value( | |
+ r["response_headers"], "Content-Length" | |
+ ) | |
if value: | |
content_length = int(value) | |
if content_length >= 0: | |
- request['objectSize'] = content_length | |
- request['bytesIn'] = content_length + \ | |
- sum(len(s) for s in request['headers']['response']) | |
+ request["objectSize"] = content_length | |
+ request["bytesIn"] = content_length + sum( | |
+ len(s) for s in request["headers"]["response"] | |
+ ) | |
except Exception: | |
pass | |
requests.append(request) | |
- requests.sort(key=lambda x: x['load_start']) | |
+ requests.sort(key=lambda x: x["load_start"]) | |
return requests | |
def calculate_page_stats(self, requests): | |
"""Calculate the page-level stats""" | |
- page = {'loadTime': 0, | |
- 'docTime': 0, | |
- 'fullyLoaded': 0, | |
- 'bytesOut': 0, | |
- 'bytesOutDoc': 0, | |
- 'bytesIn': 0, | |
- 'bytesInDoc': 0, | |
- 'requests': len(requests), | |
- 'requestsDoc': 0, | |
- 'responses_200': 0, | |
- 'responses_404': 0, | |
- 'responses_other': 0, | |
- 'result': 0, | |
- 'testStartOffset': 0, | |
- 'cached': 1 if self.task['cached'] else 0, | |
- 'optimization_checked': 0, | |
- 'start_epoch': int((self.task['start_time'] - \ | |
- datetime.utcfromtimestamp(0)).total_seconds()) | |
- } | |
- if 'loadEventStart' in self.task['page_data']: | |
- page['loadTime'] = self.task['page_data']['loadEventStart'] | |
- page['docTime'] = page['loadTime'] | |
- page['loadEventStart'] = page['loadTime'] | |
- page['loadEventEnd'] = page['loadTime'] | |
- if 'loaded' in self.page: | |
- page['loadTime'] = int(round((self.page['loaded'] - self.page['start']) * 1000.0)) | |
- page['docTime'] = page['loadTime'] | |
- page['loadEventStart'] = page['loadTime'] | |
- page['loadEventEnd'] = page['loadTime'] | |
- if 'DOMContentLoaded' in self.page: | |
- page['domContentLoadedEventStart'] = int(round((self.page['DOMContentLoaded'] - | |
- self.page['start']) * 1000.0)) | |
- page['domContentLoadedEventEnd'] = page['domContentLoadedEventStart'] | |
+ page = { | |
+ "loadTime": 0, | |
+ "docTime": 0, | |
+ "fullyLoaded": 0, | |
+ "bytesOut": 0, | |
+ "bytesOutDoc": 0, | |
+ "bytesIn": 0, | |
+ "bytesInDoc": 0, | |
+ "requests": len(requests), | |
+ "requestsDoc": 0, | |
+ "responses_200": 0, | |
+ "responses_404": 0, | |
+ "responses_other": 0, | |
+ "result": 0, | |
+ "testStartOffset": 0, | |
+ "cached": 1 if self.task["cached"] else 0, | |
+ "optimization_checked": 0, | |
+ "start_epoch": int( | |
+ (self.task["start_time"] - datetime.utcfromtimestamp(0)).total_seconds() | |
+ ), | |
+ } | |
+ if "loadEventStart" in self.task["page_data"]: | |
+ page["loadTime"] = self.task["page_data"]["loadEventStart"] | |
+ page["docTime"] = page["loadTime"] | |
+ page["loadEventStart"] = page["loadTime"] | |
+ page["loadEventEnd"] = page["loadTime"] | |
+ if "loaded" in self.page: | |
+ page["loadTime"] = int( | |
+ round((self.page["loaded"] - self.page["start"]) * 1000.0) | |
+ ) | |
+ page["docTime"] = page["loadTime"] | |
+ page["loadEventStart"] = page["loadTime"] | |
+ page["loadEventEnd"] = page["loadTime"] | |
+ if "DOMContentLoaded" in self.page: | |
+ page["domContentLoadedEventStart"] = int( | |
+ round((self.page["DOMContentLoaded"] - self.page["start"]) * 1000.0) | |
+ ) | |
+ page["domContentLoadedEventEnd"] = page["domContentLoadedEventStart"] | |
main_request = None | |
index = 0 | |
for request in requests: | |
- if request['load_ms'] >= 0: | |
- end_time = request['load_start'] + request['load_ms'] | |
- if end_time > page['fullyLoaded']: | |
- page['fullyLoaded'] = end_time | |
- if end_time <= page['loadTime']: | |
- page['requestsDoc'] += 1 | |
- page['bytesInDoc'] += request['bytesIn'] | |
- page['bytesOutDoc'] += request['bytesOut'] | |
- page['bytesIn'] += request['bytesIn'] | |
- page['bytesOut'] += request['bytesOut'] | |
- if request['responseCode'] == 200: | |
- page['responses_200'] += 1 | |
- elif request['responseCode'] == 404: | |
- page['responses_404'] += 1 | |
- page['result'] = 99999 | |
- elif request['responseCode'] > -1: | |
- page['responses_other'] += 1 | |
- if main_request is None and \ | |
- (request['responseCode'] == 200 or request['responseCode'] == 304): | |
- main_request = request['id'] | |
- request['is_base_page'] = True | |
- page['final_base_page_request'] = index | |
- page['final_base_page_request_id'] = main_request | |
- page['final_url'] = request['full_url'] | |
- if 'URL' not in self.task['page_data']: | |
- self.task['page_data']['URL'] = page['final_url'] | |
- if request['ttfb_ms'] >= 0: | |
- page['TTFB'] = request['load_start'] + request['ttfb_ms'] | |
- if request['ssl_end'] >= request['ssl_start'] and \ | |
- request['ssl_start'] >= 0: | |
- page['basePageSSLTime'] = int(round(request['ssl_end'] - \ | |
- request['ssl_start'])) | |
+ if request["load_ms"] >= 0: | |
+ end_time = request["load_start"] + request["load_ms"] | |
+ if end_time > page["fullyLoaded"]: | |
+ page["fullyLoaded"] = end_time | |
+ if end_time <= page["loadTime"]: | |
+ page["requestsDoc"] += 1 | |
+ page["bytesInDoc"] += request["bytesIn"] | |
+ page["bytesOutDoc"] += request["bytesOut"] | |
+ page["bytesIn"] += request["bytesIn"] | |
+ page["bytesOut"] += request["bytesOut"] | |
+ if request["responseCode"] == 200: | |
+ page["responses_200"] += 1 | |
+ elif request["responseCode"] == 404: | |
+ page["responses_404"] += 1 | |
+ page["result"] = 99999 | |
+ elif request["responseCode"] > -1: | |
+ page["responses_other"] += 1 | |
+ if main_request is None and ( | |
+ request["responseCode"] == 200 or request["responseCode"] == 304 | |
+ ): | |
+ main_request = request["id"] | |
+ request["is_base_page"] = True | |
+ page["final_base_page_request"] = index | |
+ page["final_base_page_request_id"] = main_request | |
+ page["final_url"] = request["full_url"] | |
+ if "URL" not in self.task["page_data"]: | |
+ self.task["page_data"]["URL"] = page["final_url"] | |
+ if request["ttfb_ms"] >= 0: | |
+ page["TTFB"] = request["load_start"] + request["ttfb_ms"] | |
+ if ( | |
+ request["ssl_end"] >= request["ssl_start"] | |
+ and request["ssl_start"] >= 0 | |
+ ): | |
+ page["basePageSSLTime"] = int( | |
+ round(request["ssl_end"] - request["ssl_start"]) | |
+ ) | |
if self.nav_error_code is not None: | |
- page['result'] = self.nav_error_code | |
- elif page['responses_200'] == 0 and len(requests): | |
- if 'responseCode' in requests[0]: | |
- page['result'] = requests[0]['responseCode'] | |
+ page["result"] = self.nav_error_code | |
+ elif page["responses_200"] == 0 and len(requests): | |
+ if "responseCode" in requests[0]: | |
+ page["result"] = requests[0]["responseCode"] | |
else: | |
- page['result'] = 12999 | |
- self.task['page_result'] = page['result'] | |
+ page["result"] = 12999 | |
+ self.task["page_result"] = page["result"] | |
return page | |
def process_optimization_results(self, page_data, requests, optimization_results): | |
"""Merge the data from the optimization checks file""" | |
if optimization_results: | |
- page_data['score_cache'] = -1 | |
- page_data['score_cdn'] = -1 | |
- page_data['score_gzip'] = -1 | |
- page_data['score_cookies'] = -1 | |
- page_data['score_keep-alive'] = -1 | |
- page_data['score_minify'] = -1 | |
- page_data['score_combine'] = -1 | |
- page_data['score_compress'] = -1 | |
- page_data['score_etags'] = -1 | |
- page_data['score_progressive_jpeg'] = -1 | |
- page_data['gzip_total'] = 0 | |
- page_data['gzip_savings'] = 0 | |
- page_data['minify_total'] = -1 | |
- page_data['minify_savings'] = -1 | |
- page_data['image_total'] = 0 | |
- page_data['image_savings'] = 0 | |
- page_data['optimization_checked'] = 1 | |
- page_data['base_page_cdn'] = '' | |
+ page_data["score_cache"] = -1 | |
+ page_data["score_cdn"] = -1 | |
+ page_data["score_gzip"] = -1 | |
+ page_data["score_cookies"] = -1 | |
+ page_data["score_keep-alive"] = -1 | |
+ page_data["score_minify"] = -1 | |
+ page_data["score_combine"] = -1 | |
+ page_data["score_compress"] = -1 | |
+ page_data["score_etags"] = -1 | |
+ page_data["score_progressive_jpeg"] = -1 | |
+ page_data["gzip_total"] = 0 | |
+ page_data["gzip_savings"] = 0 | |
+ page_data["minify_total"] = -1 | |
+ page_data["minify_savings"] = -1 | |
+ page_data["image_total"] = 0 | |
+ page_data["image_savings"] = 0 | |
+ page_data["optimization_checked"] = 1 | |
+ page_data["base_page_cdn"] = "" | |
cache_count = 0 | |
cache_total = 0 | |
cdn_count = 0 | |
cdn_total = 0 | |
keep_alive_count = 0 | |
keep_alive_total = 0 | |
progressive_total_bytes = 0 | |
progressive_bytes = 0 | |
for request in requests: | |
- if request['responseCode'] == 200: | |
- request_id = str(request['id']) | |
- pos = request_id.find('-') | |
+ if request["responseCode"] == 200: | |
+ request_id = str(request["id"]) | |
+ pos = request_id.find("-") | |
if pos > 0: | |
request_id = request_id[:pos] | |
if request_id in optimization_results: | |
opt = optimization_results[request_id] | |
- if 'cache' in opt: | |
- request['score_cache'] = opt['cache']['score'] | |
- request['cache_time'] = opt['cache']['time'] | |
+ if "cache" in opt: | |
+ request["score_cache"] = opt["cache"]["score"] | |
+ request["cache_time"] = opt["cache"]["time"] | |
cache_count += 1 | |
- cache_total += request['score_cache'] | |
- if 'cdn' in opt: | |
- request['score_cdn'] = opt['cdn']['score'] | |
- request['cdn_provider'] = opt['cdn']['provider'] | |
+ cache_total += request["score_cache"] | |
+ if "cdn" in opt: | |
+ request["score_cdn"] = opt["cdn"]["score"] | |
+ request["cdn_provider"] = opt["cdn"]["provider"] | |
cdn_count += 1 | |
- cdn_total += request['score_cdn'] | |
- if 'is_base_page' in request and request['is_base_page'] and \ | |
- request['cdn_provider'] is not None: | |
- page_data['base_page_cdn'] = request['cdn_provider'] | |
- if 'keep_alive' in opt: | |
- request['score_keep-alive'] = opt['keep_alive']['score'] | |
+ cdn_total += request["score_cdn"] | |
+ if ( | |
+ "is_base_page" in request | |
+ and request["is_base_page"] | |
+ and request["cdn_provider"] is not None | |
+ ): | |
+ page_data["base_page_cdn"] = request["cdn_provider"] | |
+ if "keep_alive" in opt: | |
+ request["score_keep-alive"] = opt["keep_alive"]["score"] | |
keep_alive_count += 1 | |
- keep_alive_total += request['score_keep-alive'] | |
- if 'gzip' in opt: | |
- savings = opt['gzip']['size'] - opt['gzip']['target_size'] | |
- request['score_gzip'] = opt['gzip']['score'] | |
- request['gzip_total'] = opt['gzip']['size'] | |
- request['gzip_save'] = savings | |
- page_data['gzip_total'] += opt['gzip']['size'] | |
- page_data['gzip_savings'] += savings | |
- if 'image' in opt: | |
- savings = opt['image']['size'] - opt['image']['target_size'] | |
- request['score_compress'] = opt['image']['score'] | |
- request['image_total'] = opt['image']['size'] | |
- request['image_save'] = savings | |
- page_data['image_total'] += opt['image']['size'] | |
- page_data['image_savings'] += savings | |
- if 'progressive' in opt: | |
- size = opt['progressive']['size'] | |
- request['jpeg_scan_count'] = opt['progressive']['scan_count'] | |
+ keep_alive_total += request["score_keep-alive"] | |
+ if "gzip" in opt: | |
+ savings = opt["gzip"]["size"] - opt["gzip"]["target_size"] | |
+ request["score_gzip"] = opt["gzip"]["score"] | |
+ request["gzip_total"] = opt["gzip"]["size"] | |
+ request["gzip_save"] = savings | |
+ page_data["gzip_total"] += opt["gzip"]["size"] | |
+ page_data["gzip_savings"] += savings | |
+ if "image" in opt: | |
+ savings = opt["image"]["size"] - opt["image"]["target_size"] | |
+ request["score_compress"] = opt["image"]["score"] | |
+ request["image_total"] = opt["image"]["size"] | |
+ request["image_save"] = savings | |
+ page_data["image_total"] += opt["image"]["size"] | |
+ page_data["image_savings"] += savings | |
+ if "progressive" in opt: | |
+ size = opt["progressive"]["size"] | |
+ request["jpeg_scan_count"] = opt["progressive"][ | |
+ "scan_count" | |
+ ] | |
progressive_total_bytes += size | |
- if request['jpeg_scan_count'] > 1: | |
- request['score_progressive_jpeg'] = 100 | |
+ if request["jpeg_scan_count"] > 1: | |
+ request["score_progressive_jpeg"] = 100 | |
progressive_bytes += size | |
elif size < 10240: | |
- request['score_progressive_jpeg'] = 50 | |
+ request["score_progressive_jpeg"] = 50 | |
else: | |
- request['score_progressive_jpeg'] = 0 | |
+ request["score_progressive_jpeg"] = 0 | |
if cache_count > 0: | |
- page_data['score_cache'] = int(round(cache_total / cache_count)) | |
+ page_data["score_cache"] = int(round(cache_total / cache_count)) | |
if cdn_count > 0: | |
- page_data['score_cdn'] = int(round(cdn_total / cdn_count)) | |
+ page_data["score_cdn"] = int(round(cdn_total / cdn_count)) | |
if keep_alive_count > 0: | |
- page_data['score_keep-alive'] = int(round(keep_alive_total / keep_alive_count)) | |
- if page_data['gzip_total'] > 0: | |
- page_data['score_gzip'] = 100 - int(page_data['gzip_savings'] * 100 / | |
- page_data['gzip_total']) | |
- if page_data['image_total'] > 0: | |
- page_data['score_compress'] = 100 - int(page_data['image_savings'] * 100 / | |
- page_data['image_total']) | |
+ page_data["score_keep-alive"] = int( | |
+ round(keep_alive_total / keep_alive_count) | |
+ ) | |
+ if page_data["gzip_total"] > 0: | |
+ page_data["score_gzip"] = 100 - int( | |
+ page_data["gzip_savings"] * 100 / page_data["gzip_total"] | |
+ ) | |
+ if page_data["image_total"] > 0: | |
+ page_data["score_compress"] = 100 - int( | |
+ page_data["image_savings"] * 100 / page_data["image_total"] | |
+ ) | |
if progressive_total_bytes > 0: | |
- page_data['score_progressive_jpeg'] = int(round(progressive_bytes * 100 / | |
- progressive_total_bytes)) | |
+ page_data["score_progressive_jpeg"] = int( | |
+ round(progressive_bytes * 100 / progressive_total_bytes) | |
+ ) | |
+ | |
class DevToolsClient(WebSocketClient): | |
"""DevTools Websocket client""" | |
- def __init__(self, url, protocols=None, extensions=None, heartbeat_freq=None, | |
- ssl_options=None, headers=None): | |
- WebSocketClient.__init__(self, url, protocols, extensions, heartbeat_freq, | |
- ssl_options, headers) | |
+ | |
+ def __init__( | |
+ self, | |
+ url, | |
+ protocols=None, | |
+ extensions=None, | |
+ heartbeat_freq=None, | |
+ ssl_options=None, | |
+ headers=None, | |
+ ): | |
+ WebSocketClient.__init__( | |
+ self, url, protocols, extensions, heartbeat_freq, ssl_options, headers | |
+ ) | |
self.connected = False | |
self.messages = None | |
self.trace_file = None | |
def opened(self): | |
@@ -1419,11 +1641,15 @@ | |
def received_message(self, raw): | |
"""Websocket interface - message received""" | |
try: | |
if raw.is_text: | |
- message = raw.data.decode(raw.encoding) if raw.encoding is not None else raw.data | |
+ message = ( | |
+ raw.data.decode(raw.encoding) | |
+ if raw.encoding is not None | |
+ else raw.data | |
+ ) | |
if message.find("Timeline.eventRecorded") == -1: | |
logging.debug(message[:200]) | |
if message: | |
message = json.loads(message) | |
if message: | |
--- ws4py/client/__init__.py 2018-09-21 20:15:22.161086 +0000 | |
+++ ws4py/client/__init__.py 2019-02-06 17:08:29.752869 +0000 | |
@@ -8,15 +8,23 @@ | |
from ws4py import WS_KEY, WS_VERSION | |
from ws4py.exc import HandshakeError | |
from ws4py.websocket import WebSocket | |
from ws4py.compat import urlsplit | |
-__all__ = ['WebSocketBaseClient'] | |
+__all__ = ["WebSocketBaseClient"] | |
+ | |
class WebSocketBaseClient(WebSocket): | |
- def __init__(self, url, protocols=None, extensions=None, | |
- heartbeat_freq=None, ssl_options=None, headers=None): | |
+ def __init__( | |
+ self, | |
+ url, | |
+ protocols=None, | |
+ extensions=None, | |
+ heartbeat_freq=None, | |
+ ssl_options=None, | |
+ headers=None, | |
+ ): | |
""" | |
A websocket client that implements :rfc:`6455` and provides a simple | |
interface to communicate with a websocket server. | |
This class works on its own but will block if not run in | |
@@ -90,37 +98,48 @@ | |
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0) | |
else: | |
# Let's handle IPv4 and IPv6 addresses | |
# Simplified from CherryPy's code | |
try: | |
- family, socktype, proto, canonname, sa = socket.getaddrinfo(self.host, self.port, | |
- socket.AF_UNSPEC, | |
- socket.SOCK_STREAM, | |
- 0, socket.AI_PASSIVE)[0] | |
+ family, socktype, proto, canonname, sa = socket.getaddrinfo( | |
+ self.host, | |
+ self.port, | |
+ socket.AF_UNSPEC, | |
+ socket.SOCK_STREAM, | |
+ 0, | |
+ socket.AI_PASSIVE, | |
+ )[0] | |
except socket.gaierror: | |
family = socket.AF_INET | |
- if self.host.startswith('::'): | |
+ if self.host.startswith("::"): | |
family = socket.AF_INET6 | |
socktype = socket.SOCK_STREAM | |
proto = 0 | |
canonname = "" | |
sa = (self.host, self.port, 0, 0) | |
sock = socket.socket(family, socktype, proto) | |
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) | |
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) | |
- if hasattr(socket, 'AF_INET6') and family == socket.AF_INET6 and \ | |
- self.host.startswith('::'): | |
+ if ( | |
+ hasattr(socket, "AF_INET6") | |
+ and family == socket.AF_INET6 | |
+ and self.host.startswith("::") | |
+ ): | |
try: | |
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) | |
except (AttributeError, socket.error): | |
pass | |
- WebSocket.__init__(self, sock, protocols=protocols, | |
- extensions=extensions, | |
- heartbeat_freq=heartbeat_freq) | |
+ WebSocket.__init__( | |
+ self, | |
+ sock, | |
+ protocols=protocols, | |
+ extensions=extensions, | |
+ heartbeat_freq=heartbeat_freq, | |
+ ) | |
self.stream.always_mask = True | |
self.stream.expect_masking = False | |
self.key = b64encode(os.urandom(16)) | |
@@ -149,12 +168,12 @@ | |
scheme, url = self.url.split(":", 1) | |
parsed = urlsplit(url, scheme="http") | |
if parsed.hostname: | |
self.host = parsed.hostname | |
- elif '+unix' in scheme: | |
- self.host = 'localhost' | |
+ elif "+unix" in scheme: | |
+ self.host = "localhost" | |
else: | |
raise ValueError("Invalid hostname from: %s", self.url) | |
if parsed.port: | |
self.port = parsed.port | |
@@ -163,23 +182,23 @@ | |
if not self.port: | |
self.port = 80 | |
elif scheme == "wss": | |
if not self.port: | |
self.port = 443 | |
- elif scheme in ('ws+unix', 'wss+unix'): | |
+ elif scheme in ("ws+unix", "wss+unix"): | |
pass | |
else: | |
raise ValueError("Invalid scheme: %s" % scheme) | |
if parsed.path: | |
resource = parsed.path | |
else: | |
resource = "/" | |
- if '+unix' in scheme: | |
+ if "+unix" in scheme: | |
self.unix_socket_path = resource | |
- resource = '/' | |
+ resource = "/" | |
if parsed.query: | |
resource += "?" + parsed.query | |
self.scheme = scheme | |
@@ -192,11 +211,11 @@ | |
``(host, port)`` depending on the initial | |
URL's scheme. | |
""" | |
return self.unix_socket_path or (self.host, self.port) | |
- def close(self, code=1000, reason=''): | |
+ def close(self, code=1000, reason=""): | |
""" | |
Initiate the closing handshake with the server. | |
""" | |
if not self.client_terminated: | |
self.client_terminated = True | |
@@ -209,17 +228,17 @@ | |
""" | |
if self.scheme == "wss": | |
# default port is now 443; upgrade self.sender to send ssl | |
self.sock = ssl.wrap_socket(self.sock, **self.ssl_options) | |
self._is_secure = True | |
- | |
+ | |
self.sock.connect(self.bind_addr) | |
self._write(self.handshake_request) | |
- response = b'' | |
- doubleCLRF = b'\r\n\r\n' | |
+ response = b"" | |
+ doubleCLRF = b"\r\n\r\n" | |
while True: | |
bytes = self.sock.recv(128) | |
if not bytes: | |
break | |
response += bytes | |
@@ -229,11 +248,11 @@ | |
if not response: | |
self.close_connection() | |
raise HandshakeError("Invalid response") | |
headers, _, body = response.partition(doubleCLRF) | |
- response_line, _, headers = headers.partition(b'\r\n') | |
+ response_line, _, headers = headers.partition(b"\r\n") | |
try: | |
self.process_response_line(response_line) | |
self.protocols, self.extensions = self.process_handshake_header(headers) | |
except HandshakeError: | |
@@ -249,58 +268,58 @@ | |
""" | |
List of headers appropriate for the upgrade | |
handshake. | |
""" | |
headers = [ | |
- ('Host', '%s:%s' % (self.host, self.port)), | |
- ('Connection', 'Upgrade'), | |
- ('Upgrade', 'websocket'), | |
- ('Sec-WebSocket-Key', self.key.decode('utf-8')), | |
- ('Sec-WebSocket-Version', str(max(WS_VERSION))) | |
- ] | |
- | |
+ ("Host", "%s:%s" % (self.host, self.port)), | |
+ ("Connection", "Upgrade"), | |
+ ("Upgrade", "websocket"), | |
+ ("Sec-WebSocket-Key", self.key.decode("utf-8")), | |
+ ("Sec-WebSocket-Version", str(max(WS_VERSION))), | |
+ ] | |
+ | |
if self.protocols: | |
- headers.append(('Sec-WebSocket-Protocol', ','.join(self.protocols))) | |
+ headers.append(("Sec-WebSocket-Protocol", ",".join(self.protocols))) | |
if self.extra_headers: | |
headers.extend(self.extra_headers) | |
- if not any(x for x in headers if x[0].lower() == 'origin'): | |
+ if not any(x for x in headers if x[0].lower() == "origin"): | |
scheme, url = self.url.split(":", 1) | |
parsed = urlsplit(url, scheme="http") | |
if parsed.hostname: | |
self.host = parsed.hostname | |
else: | |
- self.host = 'localhost' | |
- origin = scheme + '://' + self.host | |
+ self.host = "localhost" | |
+ origin = scheme + "://" + self.host | |
if parsed.port: | |
- origin = origin + ':' + str(parsed.port) | |
- headers.append(('Origin', origin)) | |
+ origin = origin + ":" + str(parsed.port) | |
+ headers.append(("Origin", origin)) | |
return headers | |
@property | |
def handshake_request(self): | |
""" | |
Prepare the request to be sent for the upgrade handshake. | |
""" | |
headers = self.handshake_headers | |
- request = [("GET %s HTTP/1.1" % self.resource).encode('utf-8')] | |
+ request = [("GET %s HTTP/1.1" % self.resource).encode("utf-8")] | |
for header, value in headers: | |
- request.append(("%s: %s" % (header, value)).encode('utf-8')) | |
- request.append(b'\r\n') | |
- | |
- return b'\r\n'.join(request) | |
+ request.append(("%s: %s" % (header, value)).encode("utf-8")) | |
+ request.append(b"\r\n") | |
+ | |
+ return b"\r\n".join(request) | |
def process_response_line(self, response_line): | |
""" | |
Ensure that we received a HTTP `101` status code in | |
response to our request and if not raises :exc:`HandshakeError`. | |
""" | |
- protocol, code, status = response_line.split(b' ', 2) | |
- if code != b'101': | |
+ protocol, code, status = response_line.split(b" ", 2) | |
+ if code != b"101": | |
raise HandshakeError("Invalid response status: %s %s" % (code, status)) | |
def process_handshake_header(self, headers): | |
""" | |
Read the upgrade handshake's response headers and | |
@@ -309,31 +328,31 @@ | |
protocols = [] | |
extensions = [] | |
headers = headers.strip() | |
- for header_line in headers.split(b'\r\n'): | |
- header, value = header_line.split(b':', 1) | |
+ for header_line in headers.split(b"\r\n"): | |
+ header, value = header_line.split(b":", 1) | |
header = header.strip().lower() | |
value = value.strip().lower() | |
- if header == b'upgrade' and value != b'websocket': | |
+ if header == b"upgrade" and value != b"websocket": | |
raise HandshakeError("Invalid Upgrade header: %s" % value) | |
- elif header == b'connection' and value != b'upgrade': | |
+ elif header == b"connection" and value != b"upgrade": | |
raise HandshakeError("Invalid Connection header: %s" % value) | |
- elif header == b'sec-websocket-accept': | |
+ elif header == b"sec-websocket-accept": | |
match = b64encode(sha1(self.key + WS_KEY).digest()) | |
if value != match.lower(): | |
raise HandshakeError("Invalid challenge response: %s" % value) | |
- elif header == b'sec-websocket-protocol': | |
- protocols = ','.join(value) | |
- | |
- elif header == b'sec-websocket-extensions': | |
- extensions = ','.join(value) | |
+ elif header == b"sec-websocket-protocol": | |
+ protocols = ",".join(value) | |
+ | |
+ elif header == b"sec-websocket-extensions": | |
+ extensions = ",".join(value) | |
return protocols, extensions | |
def handshake_ok(self): | |
self.opened() | |
--- ws4py/exc.py 2018-10-09 06:20:18.922114 +0000 | |
+++ ws4py/exc.py 2019-02-06 17:08:29.784223 +0000 | |
@@ -1,25 +1,48 @@ | |
# -*- coding: utf-8 -*- | |
-__all__ = ['WebSocketException', 'FrameTooLargeException', 'ProtocolException', | |
- 'UnsupportedFrameTypeException', 'TextFrameEncodingException', | |
- 'UnsupportedFrameTypeException', 'TextFrameEncodingException', | |
- 'StreamClosed', 'HandshakeError', 'InvalidBytesError'] | |
+__all__ = [ | |
+ "WebSocketException", | |
+ "FrameTooLargeException", | |
+ "ProtocolException", | |
+ "UnsupportedFrameTypeException", | |
+ "TextFrameEncodingException", | |
+ "UnsupportedFrameTypeException", | |
+ "TextFrameEncodingException", | |
+ "StreamClosed", | |
+ "HandshakeError", | |
+ "InvalidBytesError", | |
+] | |
-class WebSocketException(Exception): pass | |
-class ProtocolException(WebSocketException): pass | |
+class WebSocketException(Exception): | |
+ pass | |
-class FrameTooLargeException(WebSocketException): pass | |
-class UnsupportedFrameTypeException(WebSocketException): pass | |
+class ProtocolException(WebSocketException): | |
+ pass | |
-class TextFrameEncodingException(WebSocketException): pass | |
-class InvalidBytesError(WebSocketException): pass | |
+class FrameTooLargeException(WebSocketException): | |
+ pass | |
-class StreamClosed(Exception): pass | |
+ | |
+class UnsupportedFrameTypeException(WebSocketException): | |
+ pass | |
+ | |
+ | |
+class TextFrameEncodingException(WebSocketException): | |
+ pass | |
+ | |
+ | |
+class InvalidBytesError(WebSocketException): | |
+ pass | |
+ | |
+ | |
+class StreamClosed(Exception): | |
+ pass | |
+ | |
class HandshakeError(WebSocketException): | |
def __init__(self, msg): | |
self.msg = msg | |
--- ws4py/compat.py 2018-09-21 20:15:22.161481 +0000 | |
+++ ws4py/compat.py 2019-02-06 17:08:29.794528 +0000 | |
@@ -14,10 +14,11 @@ | |
import sys | |
if sys.version_info >= (3, 0): | |
py3k = True | |
from urllib.parse import urlsplit | |
+ | |
range = range | |
unicode = str | |
basestring = (bytes, str) | |
_ord = ord | |
@@ -29,13 +30,16 @@ | |
def ord(c): | |
if isinstance(c, int): | |
return c | |
return _ord(c) | |
+ | |
+ | |
else: | |
py3k = False | |
from urlparse import urlsplit | |
+ | |
range = xrange | |
unicode = unicode | |
basestring = basestring | |
ord = ord | |
--- ws4py/client/tornadoclient.py 2018-09-21 20:15:22.161391 +0000 | |
+++ ws4py/client/tornadoclient.py 2019-02-06 17:08:29.821825 +0000 | |
@@ -3,15 +3,23 @@ | |
from tornado import iostream, escape | |
from ws4py.client import WebSocketBaseClient | |
from ws4py.exc import HandshakeError | |
-__all__ = ['TornadoWebSocketClient'] | |
+__all__ = ["TornadoWebSocketClient"] | |
+ | |
class TornadoWebSocketClient(WebSocketBaseClient): | |
- def __init__(self, url, protocols=None, extensions=None, | |
- io_loop=None, ssl_options=None, headers=None): | |
+ def __init__( | |
+ self, | |
+ url, | |
+ protocols=None, | |
+ extensions=None, | |
+ io_loop=None, | |
+ ssl_options=None, | |
+ headers=None, | |
+ ): | |
""" | |
.. code-block:: python | |
from tornado import ioloop | |
@@ -29,16 +37,21 @@ | |
ws = MyClient('ws://localhost:9000/echo', protocols=['http-only', 'chat']) | |
ws.connect() | |
ioloop.IOLoop.instance().start() | |
""" | |
- WebSocketBaseClient.__init__(self, url, protocols, extensions, | |
- ssl_options=ssl_options, headers=headers) | |
+ WebSocketBaseClient.__init__( | |
+ self, url, protocols, extensions, ssl_options=ssl_options, headers=headers | |
+ ) | |
if self.scheme == "wss": | |
- self.sock = ssl.wrap_socket(self.sock, do_handshake_on_connect=False, **self.ssl_options) | |
+ self.sock = ssl.wrap_socket( | |
+ self.sock, do_handshake_on_connect=False, **self.ssl_options | |
+ ) | |
self._is_secure = True | |
- self.io = iostream.SSLIOStream(self.sock, io_loop, ssl_options=self.ssl_options) | |
+ self.io = iostream.SSLIOStream( | |
+ self.sock, io_loop, ssl_options=self.ssl_options | |
+ ) | |
else: | |
self.io = iostream.IOStream(self.sock, io_loop) | |
self.io_loop = io_loop | |
def connect(self): | |
@@ -61,28 +74,27 @@ | |
self.io.write(b) | |
def __connection_refused(self, *args, **kwargs): | |
self.server_terminated = True | |
- self.closed(1005, 'Connection refused') | |
+ self.closed(1005, "Connection refused") | |
def __send_handshake(self): | |
self.io.set_close_callback(self.__connection_closed) | |
- self.io.write(escape.utf8(self.handshake_request), | |
- self.__handshake_sent) | |
+ self.io.write(escape.utf8(self.handshake_request), self.__handshake_sent) | |
def __connection_closed(self, *args, **kwargs): | |
self.server_terminated = True | |
- self.closed(1006, 'Connection closed during handshake') | |
+ self.closed(1006, "Connection closed during handshake") | |
def __handshake_sent(self): | |
self.io.read_until(b"\r\n\r\n", self.__handshake_completed) | |
def __handshake_completed(self, data): | |
self.io.set_close_callback(None) | |
try: | |
- response_line, _, headers = data.partition(b'\r\n') | |
+ response_line, _, headers = data.partition(b"\r\n") | |
self.process_response_line(response_line) | |
protocols, extensions = self.process_handshake_header(headers) | |
except HandshakeError: | |
self.close_connection() | |
raise | |
@@ -124,11 +136,12 @@ | |
""" | |
Close the underlying connection | |
""" | |
self.io.close() | |
-if __name__ == '__main__': | |
+ | |
+if __name__ == "__main__": | |
from tornado import ioloop | |
class MyClient(TornadoWebSocketClient): | |
def opened(self): | |
def data_provider(): | |
@@ -147,10 +160,10 @@ | |
def closed(self, code, reason=None): | |
ioloop.IOLoop.instance().stop() | |
print(("Closed down", code, reason)) | |
- ws = MyClient('ws://localhost:9000/ws', protocols=['http-only', 'chat']) | |
+ ws = MyClient("ws://localhost:9000/ws", protocols=["http-only", "chat"]) | |
ws.connect() | |
ioloop.IOLoop.instance().start() | |
--- ws4py/messaging.py 2018-09-21 20:15:22.162133 +0000 | |
+++ ws4py/messaging.py 2019-02-06 17:08:29.992808 +0000 | |
@@ -1,18 +1,32 @@ | |
# -*- coding: utf-8 -*- | |
import os | |
import struct | |
-from ws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \ | |
- OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG | |
+from ws4py.framing import ( | |
+ Frame, | |
+ OPCODE_CONTINUATION, | |
+ OPCODE_TEXT, | |
+ OPCODE_BINARY, | |
+ OPCODE_CLOSE, | |
+ OPCODE_PING, | |
+ OPCODE_PONG, | |
+) | |
from ws4py.compat import unicode, py3k | |
-__all__ = ['Message', 'TextMessage', 'BinaryMessage', 'CloseControlMessage', | |
- 'PingControlMessage', 'PongControlMessage'] | |
+__all__ = [ | |
+ "Message", | |
+ "TextMessage", | |
+ "BinaryMessage", | |
+ "CloseControlMessage", | |
+ "PingControlMessage", | |
+ "PongControlMessage", | |
+] | |
+ | |
class Message(object): | |
- def __init__(self, opcode, data=b'', encoding='utf-8'): | |
+ def __init__(self, opcode, data=b"", encoding="utf-8"): | |
""" | |
A message is a application level entity. It's usually built | |
from one or many frames. The protocol defines several kind | |
of messages which are grouped into two sets: | |
@@ -49,12 +63,13 @@ | |
If ``mask`` is set, automatically mask the frame | |
using a generated 4-byte token. | |
""" | |
mask = os.urandom(4) if mask else None | |
- return Frame(body=self.data, opcode=self.opcode, | |
- masking_key=mask, fin=1).build() | |
+ return Frame( | |
+ body=self.data, opcode=self.opcode, masking_key=mask, fin=1 | |
+ ).build() | |
def fragment(self, first=False, last=False, mask=False): | |
""" | |
Returns a :class:`ws4py.framing.Frame` bytes. | |
@@ -65,13 +80,11 @@ | |
* ``mask``: the frame is masked using a automatically generated 4-byte token | |
""" | |
fin = 1 if last is True else 0 | |
opcode = self.opcode if first is True else OPCODE_CONTINUATION | |
mask = os.urandom(4) if mask else None | |
- return Frame(body=self.data, | |
- opcode=opcode, masking_key=mask, | |
- fin=fin).build() | |
+ return Frame(body=self.data, opcode=opcode, masking_key=mask, fin=fin).build() | |
@property | |
def completed(self): | |
""" | |
Indicates the the message is complete, meaning | |
@@ -109,10 +122,11 @@ | |
return self.data | |
def __unicode__(self): | |
return self.data.decode(self.encoding) | |
+ | |
class TextMessage(Message): | |
def __init__(self, text=None): | |
Message.__init__(self, OPCODE_TEXT, text) | |
@property | |
@@ -120,10 +134,11 @@ | |
return False | |
@property | |
def is_text(self): | |
return True | |
+ | |
class BinaryMessage(Message): | |
def __init__(self, bytes=None): | |
Message.__init__(self, OPCODE_BINARY, bytes, encoding=None) | |
@@ -136,35 +151,38 @@ | |
return False | |
def __len__(self): | |
return len(self.data) | |
+ | |
class CloseControlMessage(Message): | |
- def __init__(self, code=1000, reason=''): | |
+ def __init__(self, code=1000, reason=""): | |
data = b"" | |
if code: | |
data += struct.pack("!H", code) | |
if reason is not None: | |
if isinstance(reason, unicode): | |
- reason = reason.encode('utf-8') | |
+ reason = reason.encode("utf-8") | |
data += reason | |
- Message.__init__(self, OPCODE_CLOSE, data, 'utf-8') | |
+ Message.__init__(self, OPCODE_CLOSE, data, "utf-8") | |
self.code = code | |
self.reason = reason | |
def __str__(self): | |
if py3k: | |
- return self.reason.decode('utf-8') | |
+ return self.reason.decode("utf-8") | |
return self.reason | |
def __unicode__(self): | |
return self.reason.decode(self.encoding) | |
+ | |
class PingControlMessage(Message): | |
def __init__(self, data=None): | |
Message.__init__(self, OPCODE_PING, data) | |
+ | |
class PongControlMessage(Message): | |
def __init__(self, data): | |
Message.__init__(self, OPCODE_PONG, data) | |
--- internal/support/devtools_parser.py 2019-01-08 01:37:06.393719 +0000 | |
+++ internal/support/devtools_parser.py 2019-02-06 17:08:29.994196 +0000 | |
@@ -25,22 +25,26 @@ | |
try: | |
import ujson as json | |
except BaseException: | |
import json | |
+ | |
class DevToolsParser(object): | |
"""Main class""" | |
+ | |
def __init__(self, options): | |
- self.devtools_file = options['devtools'] | |
- self.netlog_requests_file = options['netlog'] if 'netlog' in options else None | |
- self.optimization = options['optimization'] if 'optimization' in options else None | |
- self.user_timing_file = options['user'] if 'user' in options else None | |
- self.coverage = options['coverage'] if 'coverage' in options else None | |
- self.cpu_times = options['cpu'] if 'cpu' in options else None | |
- self.cached = options['cached'] if 'cached' in options else False | |
- self.out_file = options['out'] | |
- self.result = {'pageData': {}, 'requests': []} | |
+ self.devtools_file = options["devtools"] | |
+ self.netlog_requests_file = options["netlog"] if "netlog" in options else None | |
+ self.optimization = ( | |
+ options["optimization"] if "optimization" in options else None | |
+ ) | |
+ self.user_timing_file = options["user"] if "user" in options else None | |
+ self.coverage = options["coverage"] if "coverage" in options else None | |
+ self.cpu_times = options["cpu"] if "cpu" in options else None | |
+ self.cached = options["cached"] if "cached" in options else False | |
+ self.out_file = options["out"] | |
+ self.result = {"pageData": {}, "requests": []} | |
self.request_ids = {} | |
def process(self): | |
"""Main entry point for processing""" | |
logging.debug("Processing raw devtools events") | |
@@ -88,522 +92,713 @@ | |
pass | |
def write(self): | |
"""Write out the resulting json data""" | |
if self.out_file is not None: | |
- if len(self.result['pageData']) or len(self.result['requests']): | |
+ if len(self.result["pageData"]) or len(self.result["requests"]): | |
try: | |
_, ext = os.path.splitext(self.out_file) | |
- if ext.lower() == '.gz': | |
- with gzip.open(self.out_file, 'wb') as f_out: | |
+ if ext.lower() == ".gz": | |
+ with gzip.open(self.out_file, "wb") as f_out: | |
json.dump(self.result, f_out) | |
else: | |
- with open(self.out_file, 'w') as f_out: | |
+ with open(self.out_file, "w") as f_out: | |
json.dump(self.result, f_out) | |
except Exception: | |
logging.critical("Error writing to " + self.out_file) | |
def extract_net_requests(self): | |
"""Load the events we are interested in""" | |
has_request_headers = False | |
net_requests = [] | |
- page_data = {'endTime': 0} | |
+ page_data = {"endTime": 0} | |
_, ext = os.path.splitext(self.devtools_file) | |
- if ext.lower() == '.gz': | |
- f_in = gzip.open(self.devtools_file, 'rb') | |
+ if ext.lower() == ".gz": | |
+ f_in = gzip.open(self.devtools_file, "rb") | |
else: | |
- f_in = open(self.devtools_file, 'r') | |
+ f_in = open(self.devtools_file, "r") | |
raw_events = json.load(f_in) | |
# sort all of the events by timestamp | |
if len(raw_events): | |
- raw_events.sort(key=lambda x: x['params']['timestamp'] if \ | |
- ('params' in x and 'timestamp' in x['params']) else 9999999) | |
+ raw_events.sort( | |
+ key=lambda x: x["params"]["timestamp"] | |
+ if ("params" in x and "timestamp" in x["params"]) | |
+ else 9999999 | |
+ ) | |
f_in.close() | |
if raw_events is not None and len(raw_events): | |
end_timestamp = None | |
first_timestamp = None | |
raw_requests = {} | |
id_map = {} | |
for raw_event in raw_events: | |
- if 'method' in raw_event and 'params' in raw_event: | |
- method = raw_event['method'] | |
- params = raw_event['params'] | |
+ if "method" in raw_event and "params" in raw_event: | |
+ method = raw_event["method"] | |
+ params = raw_event["params"] | |
request_id = None | |
original_id = None | |
- if 'requestId' in params: | |
- request_id = params['requestId'] | |
+ if "requestId" in params: | |
+ request_id = params["requestId"] | |
original_id = request_id | |
if request_id in id_map: | |
- request_id += '-' + str(id_map[request_id]) | |
+ request_id += "-" + str(id_map[request_id]) | |
# Handle the events without timestamps (which will be sorted to the end) | |
- if method == 'Page.frameNavigated' and 'frame' in params and \ | |
- 'id' in params['frame'] and 'parentId' not in params['frame']: | |
- page_data['main_frame'] = params['frame']['id'] | |
- if method == 'Network.requestServedFromCache' and 'requestId' in params and \ | |
- request_id is not None and request_id in raw_requests: | |
- raw_requests[request_id]['fromNet'] = False | |
- raw_requests[request_id]['fromCache'] = True | |
+ if ( | |
+ method == "Page.frameNavigated" | |
+ and "frame" in params | |
+ and "id" in params["frame"] | |
+ and "parentId" not in params["frame"] | |
+ ): | |
+ page_data["main_frame"] = params["frame"]["id"] | |
+ if ( | |
+ method == "Network.requestServedFromCache" | |
+ and "requestId" in params | |
+ and request_id is not None | |
+ and request_id in raw_requests | |
+ ): | |
+ raw_requests[request_id]["fromNet"] = False | |
+ raw_requests[request_id]["fromCache"] = True | |
# Adjust all of the timestamps to be relative to the start of navigation | |
# and in milliseconds | |
- if first_timestamp is None and 'timestamp' in params and \ | |
- method == 'Network.requestWillBeSent': | |
- first_timestamp = params['timestamp'] | |
- if first_timestamp is not None and 'timestamp' in params: | |
- if params['timestamp'] >= first_timestamp: | |
- params['timestamp'] -= first_timestamp | |
- params['timestamp'] *= 1000.0 | |
+ if ( | |
+ first_timestamp is None | |
+ and "timestamp" in params | |
+ and method == "Network.requestWillBeSent" | |
+ ): | |
+ first_timestamp = params["timestamp"] | |
+ if first_timestamp is not None and "timestamp" in params: | |
+ if params["timestamp"] >= first_timestamp: | |
+ params["timestamp"] -= first_timestamp | |
+ params["timestamp"] *= 1000.0 | |
else: | |
continue | |
- if method == 'Page.loadEventFired' and 'timestamp' in params and \ | |
- ('onload' not in page_data or | |
- params['timestamp'] > page_data['onload']): | |
- page_data['onload'] = params['timestamp'] | |
- if 'timestamp' in params and request_id is not None: | |
- timestamp = params['timestamp'] | |
- if method == 'Network.requestWillBeSent' and 'request' in params and \ | |
- 'url' in params['request'] and \ | |
- params['request']['url'][:4] == 'http': | |
- request = params['request'] | |
- request['raw_id'] = original_id | |
- request['startTime'] = timestamp | |
- if 'frameId' in params: | |
- request['frame_id'] = params['frameId'] | |
- elif 'main_frame' in page_data: | |
- request['frame_id'] = page_data['main_frame'] | |
- if 'initiator' in params: | |
- request['initiator'] = params['initiator'] | |
+ if ( | |
+ method == "Page.loadEventFired" | |
+ and "timestamp" in params | |
+ and ( | |
+ "onload" not in page_data | |
+ or params["timestamp"] > page_data["onload"] | |
+ ) | |
+ ): | |
+ page_data["onload"] = params["timestamp"] | |
+ if "timestamp" in params and request_id is not None: | |
+ timestamp = params["timestamp"] | |
+ if ( | |
+ method == "Network.requestWillBeSent" | |
+ and "request" in params | |
+ and "url" in params["request"] | |
+ and params["request"]["url"][:4] == "http" | |
+ ): | |
+ request = params["request"] | |
+ request["raw_id"] = original_id | |
+ request["startTime"] = timestamp | |
+ if "frameId" in params: | |
+ request["frame_id"] = params["frameId"] | |
+ elif "main_frame" in page_data: | |
+ request["frame_id"] = page_data["main_frame"] | |
+ if "initiator" in params: | |
+ request["initiator"] = params["initiator"] | |
# Redirects re-use the same ID so we need to fake a new request | |
if request_id in raw_requests: | |
- if 'redirectResponse' in params: | |
- if 'endTime' not in raw_requests[request_id] or \ | |
- timestamp > raw_requests[request_id]['endTime']: | |
- raw_requests[request_id]['endTime'] = timestamp | |
- if 'firstByteTime' not in raw_requests[request_id]: | |
- raw_requests[request_id]['firstByteTime'] = timestamp | |
+ if "redirectResponse" in params: | |
+ if ( | |
+ "endTime" not in raw_requests[request_id] | |
+ or timestamp | |
+ > raw_requests[request_id]["endTime"] | |
+ ): | |
+ raw_requests[request_id]["endTime"] = timestamp | |
+ if "firstByteTime" not in raw_requests[request_id]: | |
+ raw_requests[request_id][ | |
+ "firstByteTime" | |
+ ] = timestamp | |
# iOS incorrectly sets the fromNet flag to false for resources | |
# from cache but it doesn't have any send headers for those | |
# requests so use that as an indicator. | |
- raw_requests[request_id]['fromNet'] = False | |
- if 'fromDiskCache' in params['redirectResponse'] and \ | |
- not params['redirectResponse']['fromDiskCache'] and \ | |
- 'headers' in raw_requests[request_id] and \ | |
- len(raw_requests[request_id]['headers']): | |
- raw_requests[request_id]['fromNet'] = True | |
- raw_requests[request_id]['response'] = \ | |
- params['redirectResponse'] | |
+ raw_requests[request_id]["fromNet"] |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment