2to3 refactoring diff: OpenWPM `automation` package, Python 2 → Python 3
(auto-generated patch; text before the first `---` header is ignored by patch tools)
--- ./demo.py (original)
+++ ./demo.py (refactored)
@@ -1,4 +1,4 @@
-from automation import TaskManager, CommandSequence
+from .automation import TaskManager, CommandSequence
# The list of sites that we wish to crawl
NUM_BROWSERS = 3
@@ -10,7 +10,7 @@
manager_params, browser_params = TaskManager.load_default_params(NUM_BROWSERS)
# Update browser configuration (use this for per-browser settings)
-for i in xrange(NUM_BROWSERS):
+for i in range(NUM_BROWSERS):
browser_params[i]['disable_flash'] = False #Enable flash for all three browsers
browser_params[0]['headless'] = True #Launch only browser 0 headless
--- ./automation/BrowserManager.py (original)
+++ ./automation/BrowserManager.py (refactored)
@@ -1,19 +1,19 @@
-from Commands import command_executor
-from DeployBrowsers import deploy_browser
-from Commands import profile_commands
-from Proxy import deploy_mitm_proxy
-from SocketInterface import clientsocket
-from MPLogger import loggingclient
-from Errors import ProfileLoadError, BrowserConfigError, BrowserCrashError
+from .Commands import command_executor
+from .DeployBrowsers import deploy_browser
+from .Commands import profile_commands
+from .Proxy import deploy_mitm_proxy
+from .SocketInterface import clientsocket
+from .MPLogger import loggingclient
+from .Errors import ProfileLoadError, BrowserConfigError, BrowserCrashError
from multiprocess import Process, Queue
-from Queue import Empty as EmptyQueue
+from queue import Empty as EmptyQueue
from tblib import pickling_support
pickling_support.install()
from six import reraise
import traceback
import tempfile
-import cPickle
+import pickle
import shutil
import signal
import time
@@ -101,7 +101,7 @@
launch_status[result[1]] = True
return result[2]
elif result[0] == 'CRITICAL':
- reraise(*cPickle.loads(result[1]))
+ reraise(*pickle.loads(result[1]))
elif result[0] == 'FAILED':
raise BrowserCrashError('Browser spawn returned failure status')
@@ -138,7 +138,7 @@
error_string += " | %s: %s " % (string, launch_status.get(string, False))
self.logger.error("BROWSER %i: Spawn unsuccessful %s" % (self.crawl_id, error_string))
self.kill_browser_manager()
- if launch_status.has_key('Profile Created'):
+ if 'Profile Created' in launch_status:
shutil.rmtree(spawned_profile_path, ignore_errors=True)
# If the browser spawned successfully, we should update the
@@ -302,7 +302,7 @@
logger.info("BROWSER %i: %s thrown, informing parent and raising" %
(browser_params['crawl_id'], e.__class__.__name__))
err_info = sys.exc_info()
- status_queue.put(('CRITICAL',cPickle.dumps(err_info)))
+ status_queue.put(('CRITICAL',pickle.dumps(err_info)))
return
except Exception as e:
excp = traceback.format_exception(*sys.exc_info())
--- ./automation/CommandSequence.py (original)
+++ ./automation/CommandSequence.py (refactored)
@@ -1,4 +1,4 @@
-from Errors import CommandExecutionError
+from .Errors import CommandExecutionError
class CommandSequence:
"""A CommandSequence wraps a series of commands to be performed
--- ./automation/MPLogger.py (original)
+++ ./automation/MPLogger.py (refactored)
@@ -1,7 +1,7 @@
""" Support for logging with the multiprocessing module """
-from SocketInterface import serversocket
+from .SocketInterface import serversocket
-from Queue import Empty as EmptyQueue
+from queue import Empty as EmptyQueue
import logging.handlers
import logging
import struct
@@ -135,4 +135,4 @@
# Close the logging server
status_queue.put('DIE')
loggingserver.join()
- print "Server closed, exiting..."
+ print("Server closed, exiting...")
--- ./automation/SocketInterface.py (original)
+++ ./automation/SocketInterface.py (refactored)
@@ -1,4 +1,4 @@
-import Queue
+import queue
import threading
import traceback
import socket
@@ -19,9 +19,9 @@
self.sock.bind(('localhost', 0))
self.sock.listen(10) # queue a max of n connect requests
self.verbose = verbose
- self.queue = Queue.Queue()
+ self.queue = queue.Queue()
if self.verbose:
- print "Server bound to: " + str(self.sock.getsockname())
+ print("Server bound to: " + str(self.sock.getsockname()))
def start_accepting(self):
""" Start the listener thread """
@@ -49,13 +49,13 @@
'j' : json
"""
if self.verbose:
- print "Thread: " + str(threading.current_thread()) + " connected to: " + str(address)
+ print("Thread: " + str(threading.current_thread()) + " connected to: " + str(address))
try:
while True:
msg = self.receive_msg(client, 5)
msglen, serialization = struct.unpack('>Lc', msg)
if self.verbose:
- print "Msglen: " + str(msglen) + " is_serialized: " + str(serialization != 'n')
+ print("Msglen: " + str(msglen) + " is_serialized: " + str(serialization != 'n'))
msg = self.receive_msg(client, msglen)
if serialization != 'n':
try:
@@ -66,27 +66,27 @@
msg = json.loads(msg)
except UnicodeDecodeError:
try:
- msg = json.loads(unicode(msg, 'ISO-8859-1', 'ignore'))
+ msg = json.loads(str(msg, 'ISO-8859-1', 'ignore'))
except ValueError:
- print "****** Unrecognized character encoding during de-serialization."
+ print("****** Unrecognized character encoding during de-serialization.")
continue
except ValueError as e:
try:
- msg = json.loads(unicode(msg, 'utf-8', 'ignore'))
+ msg = json.loads(str(msg, 'utf-8', 'ignore'))
except ValueError:
- print "****** Unrecognized character encoding during de-serialization."
+ print("****** Unrecognized character encoding during de-serialization.")
continue
else:
- print "Unrecognized serialization type: %s" % serialization
+ print("Unrecognized serialization type: %s" % serialization)
continue
except (UnicodeDecodeError, ValueError) as e:
- print "Error de-serializing message: %s \n %s" % (
- msg, traceback.format_exc(e))
+ print("Error de-serializing message: %s \n %s" % (
+ msg, traceback.format_exc(e)))
continue
self.queue.put(msg)
except RuntimeError:
if self.verbose:
- print "Client socket: " + str(address) + " closed"
+ print("Client socket: " + str(address) + " closed")
def receive_msg(self, client, msglen):
msg = ''
@@ -115,7 +115,7 @@
self.verbose = verbose
def connect(self, host, port):
- if self.verbose: print "Connecting to: %s:%i" % (host, port)
+ if self.verbose: print("Connecting to: %s:%i" % (host, port))
self.sock.connect((host, port))
def send(self, msg):
@@ -137,7 +137,7 @@
else:
serialization = 'n'
- if self.verbose: print "Sending message with serialization %s" % serialization
+ if self.verbose: print("Sending message with serialization %s" % serialization)
#prepend with message length
msg = struct.pack('>Lc', len(msg), serialization) + msg
@@ -158,12 +158,12 @@
if sys.argv[1] == 's':
sock = serversocket(verbose=True)
sock.start_accepting()
- raw_input("Press enter to exit...")
+ input("Press enter to exit...")
sock.close()
elif sys.argv[1] == 'c':
- host = raw_input("Enter the host name:\n")
- port = raw_input("Enter the port:\n")
- serialization = raw_input("Enter the serialization type (default: 'json'):\n")
+ host = input("Enter the host name:\n")
+ port = input("Enter the port:\n")
+ serialization = input("Enter the serialization type (default: 'json'):\n")
if serialization == '':
serialization = 'json'
sock = clientsocket(serialization=serialization)
@@ -178,7 +178,7 @@
# read user input
while msg != "quit":
- msg = raw_input("Enter a message to send:\n")
+ msg = input("Enter a message to send:\n")
if msg == 'tuple':
sock.send(tuple_msg)
elif msg == 'list':
--- ./automation/TaskManager.py (original)
+++ ./automation/TaskManager.py (refactored)
@@ -1,14 +1,14 @@
-from BrowserManager import Browser
-from DataAggregator import DataAggregator, LevelDBAggregator
-from SocketInterface import clientsocket
-from PostProcessing import post_processing
-from Errors import CommandExecutionError
-from platform_utils import get_version, get_configuration_string
-import CommandSequence
-import MPLogger
+from .BrowserManager import Browser
+from .DataAggregator import DataAggregator, LevelDBAggregator
+from .SocketInterface import clientsocket
+from .PostProcessing import post_processing
+from .Errors import CommandExecutionError
+from .platform_utils import get_version, get_configuration_string
+from . import CommandSequence
+from . import MPLogger
from multiprocess import Process, Queue
-from Queue import Empty as EmptyQueue
+from queue import Empty as EmptyQueue
import threading
import copy
import os
@@ -28,7 +28,7 @@
fp = open(os.path.join(os.path.dirname(__file__), 'default_browser_params.json'))
preferences = json.load(fp)
fp.close()
- browser_params = [copy.deepcopy(preferences) for i in xrange(0, num_browsers)]
+ browser_params = [copy.deepcopy(preferences) for i in range(0, num_browsers)]
fp = open(os.path.join(os.path.dirname(__file__), 'default_manager_params.json'))
manager_params = json.load(fp)
@@ -150,7 +150,7 @@
self.task_id = cur.lastrowid
# Record browser details for each brower
- for i in xrange(self.num_browsers):
+ for i in range(self.num_browsers):
cur.execute("INSERT INTO crawl (task_id, browser_params) VALUES (?,?)",
(self.task_id, json.dumps(browser_params[i])))
self.db.commit()
@@ -164,7 +164,7 @@
def _initialize_browsers(self, browser_params):
""" initialize the browser classes, each its unique set of parameters """
browsers = list()
- for i in xrange(self.num_browsers):
+ for i in range(self.num_browsers):
browsers.append(Browser(self.manager_params, browser_params[i]))
return browsers
@@ -354,7 +354,7 @@
#send the command to all browsers
command_executed = [False] * len(self.browsers)
while False in command_executed:
- for i in xrange(len(self.browsers)):
+ for i in range(len(self.browsers)):
if self.browsers[i].ready() and not command_executed[i]:
self.browsers[i].current_timeout = command_sequence.total_timeout
self._start_thread(self.browsers[i], command_sequence)
@@ -365,7 +365,7 @@
condition = threading.Condition() # Used to block threads until ready
command_executed = [False] * len(self.browsers)
while False in command_executed:
- for i in xrange(len(self.browsers)):
+ for i in range(len(self.browsers)):
if self.browsers[i].ready() and not command_executed[i]:
self.browsers[i].current_timeout = command_sequence.total_timeout
self._start_thread(self.browsers[i], command_sequence, condition)
--- ./automation/platform_utils.py (original)
+++ ./automation/platform_utils.py (refactored)
@@ -65,14 +65,14 @@
# Copy items in sorted order
dct = OrderedDict()
- dct[u'crawl_id'] = crawl_id
+ dct['crawl_id'] = crawl_id
for key in sorted(item.keys()):
dct[key] = item[key]
table_input.append(dct)
key_dict = OrderedDict()
counter = 0
- for key in table_input[0].keys():
+ for key in list(table_input[0].keys()):
key_dict[key] = counter
counter += 1
config_str += "Keys:\n"
@@ -133,16 +133,16 @@
# Force pre-loading so we don't allow some ads through
fp.set_preference('extensions.adblockplus.please_kill_startup_performance', True)
- print "Starting webdriver with AdBlockPlus activated"
+ print("Starting webdriver with AdBlockPlus activated")
driver = webdriver.Firefox(firefox_profile = fp, firefox_binary = fb)
- print "Sleeping %i seconds to give the list time to download" % wait_time
+ print("Sleeping %i seconds to give the list time to download" % wait_time)
time.sleep(wait_time)
if not os.path.isdir(output_directory):
- print "Output directory %s does not exist, creating." % output_directory
+ print("Output directory %s does not exist, creating." % output_directory)
os.makedirs(output_directory)
- print "Copying blocklists to %s" % output_directory
+ print("Copying blocklists to %s" % output_directory)
try:
shutil.copy(browser_path+'adblockplus/patterns.ini', output_directory)
shutil.copy(browser_path+'adblockplus/elemhide.css', output_directory)
--- ./automation/Commands/browser_commands.py (original)
+++ ./automation/Commands/browser_commands.py (refactored)
@@ -10,9 +10,9 @@
from ..SocketInterface import clientsocket
from ..MPLogger import loggingclient
-from utils.lso import get_flash_cookies
-from utils.firefox_profile import get_cookies # todo: add back get_localStorage,
-from utils.webdriver_extensions import scroll_down, wait_until_loaded, get_intra_links
+from .utils.lso import get_flash_cookies
+from .utils.firefox_profile import get_cookies # todo: add back get_localStorage,
+from .utils.webdriver_extensions import scroll_down, wait_until_loaded, get_intra_links
# Library for core WebDriver-based browser commands
@@ -158,7 +158,7 @@
# Then visit a few subpages
for i in range(num_links):
links = get_intra_links(webdriver, url)
- links = filter(lambda x: x.is_displayed() == True, links)
+ links = [x for x in links if x.is_displayed() == True]
if len(links) == 0:
break
r = int(random.random()*len(links))
--- ./automation/Commands/command_executor.py (original)
+++ ./automation/Commands/command_executor.py (refactored)
@@ -1,5 +1,5 @@
-import browser_commands
-import profile_commands
+from . import browser_commands
+from . import profile_commands
def execute_command(command, webdriver, proxy_queue, browser_settings, browser_params, manager_params, extension_socket):
--- ./automation/Commands/profile_commands.py (original)
+++ ./automation/Commands/profile_commands.py (refactored)
@@ -1,13 +1,13 @@
import tarfile
-import cPickle
+import pickle
import shutil
import sys
import os
from ..Errors import ProfileLoadError
from ..MPLogger import loggingclient
-from utils.firefox_profile import sleep_until_sqlite_checkpoint
-from utils.file_utils import rmsubtree
+from .utils.firefox_profile import sleep_until_sqlite_checkpoint
+from .utils.file_utils import rmsubtree
# Flash Plugin Storage Location -- Linux ONLY
HOME = os.path.expanduser('~')
@@ -25,13 +25,13 @@
os.remove(location + 'browser_settings.p')
with open(location + 'browser_settings.p', 'wb') as f:
- cPickle.dump(browser_settings, f)
+ pickle.dump(browser_settings, f)
def load_browser_settings(location):
""" loads the browser settings from a pickled dictionary in <location>"""
try:
with open(location + 'browser_settings.p', 'rb') as f:
- browser_settings = cPickle.load(f)
+ browser_settings = pickle.load(f)
except IOError:
browser_settings = None
return browser_settings
--- ./automation/Commands/utils/XPathUtil.py (original)
+++ ./automation/Commands/utils/XPathUtil.py (refactored)
@@ -107,7 +107,7 @@
attr = 'normalize-space(' + attr + ')'
if len(parts) != 2:
- print "ERROR: This function is meant to support 1 wildcard"
+ print("ERROR: This function is meant to support 1 wildcard")
return '[' + attr + '=' + string + ']'
else:
pt1 = ''
@@ -127,23 +127,23 @@
elif pt1 != '' and pt2 != '':
return ('[' + pt1 + ' and ' + pt2 + ']')
else:
- print "ERROR: The string is empty"
+ print("ERROR: The string is empty")
return '[' + attr + '=' + string + ']'
if __name__=='__main__':
#Output some sample XPaths
- print "--- Sample XPaths ---"
- import urllib2
+ print("--- Sample XPaths ---")
+ import urllib.request, urllib.error, urllib.parse
import re
from random import choice
- rsp = urllib2.urlopen('http://www.reddit.com/')
+ rsp = urllib.request.urlopen('http://www.reddit.com/')
if rsp.getcode() == 200:
soup = bs(rsp.read(), 'lxml')
elements = soup.findAll(text = re.compile('[A-Za-z0-9]{10,}'))
for i in range(0,5):
element = choice(elements).parent
- print "HTML"
- print element
- print "XPath"
- print ExtractXPath(element)
- print "**************"
+ print("HTML")
+ print(element)
+ print("XPath")
+ print(ExtractXPath(element))
+ print("**************")
--- ./automation/Commands/utils/domain_utils.py (original)
+++ ./automation/Commands/utils/domain_utils.py (refactored)
@@ -1,6 +1,6 @@
from publicsuffix import PublicSuffixList, fetch
from ipaddress import ip_address
-from urlparse import urlparse
+from urllib.parse import urlparse
from functools import wraps
import tempfile
import codecs
@@ -14,18 +14,18 @@
Grabs an updated public suffix list.
"""
if not os.path.isfile(PSL_CACHE_LOC):
- print "%s does not exist, downloading a copy." % PSL_CACHE_LOC
+ print("%s does not exist, downloading a copy." % PSL_CACHE_LOC)
psl_file = fetch()
with codecs.open(PSL_CACHE_LOC, 'w', encoding='utf8') as f:
f.write(psl_file.read())
- print "Using psl from cache: %s" % PSL_CACHE_LOC
+ print("Using psl from cache: %s" % PSL_CACHE_LOC)
psl_cache = codecs.open(PSL_CACHE_LOC, encoding='utf8')
return PublicSuffixList(psl_cache)
def load_psl(function):
@wraps(function)
def wrapper(*args, **kwargs):
- if not kwargs.has_key('psl'):
+ if 'psl' not in kwargs:
if wrapper.psl is None:
wrapper.psl = get_psl()
return function(*args, psl=wrapper.psl, **kwargs)
@@ -39,7 +39,7 @@
Check if the given string is a valid IP address
"""
try:
- ip_address(unicode(hostname))
+ ip_address(str(hostname))
return True
except ValueError:
return False
@@ -54,7 +54,7 @@
An (optional) PublicSuffixList object can be passed with keyword arg 'psl',
otherwise a version cached in the system temp directory is used.
"""
- if not kwargs.has_key('psl'):
+ if 'psl' not in kwargs:
raise ValueError("A PublicSuffixList must be passed as a keyword argument.")
hostname = urlparse(url).hostname
if is_ip_address(hostname):
@@ -81,7 +81,7 @@
An (optional) PublicSuffixList object can be passed with keyword arg 'psl'.
otherwise a version cached in the system temp directory is used.
"""
- if not kwargs.has_key('psl'):
+ if 'psl' not in kwargs:
raise ValueError("A PublicSuffixList must be passed as a keyword argument.")
hostname = urlparse(url).hostname
--- ./automation/Commands/utils/firefox_profile.py (original)
+++ ./automation/Commands/utils/firefox_profile.py (refactored)
@@ -19,14 +19,14 @@
while (timeout > 0 and tmp_sqlite_files_exist(profile_dir)):
time.sleep(1)
timeout -= 1
- print "Waited for %s seconds for sqlite checkpointing" % (60 - timeout)
+ print("Waited for %s seconds for sqlite checkpointing" % (60 - timeout))
def get_localStorage(profile_directory, mod_since):
#TODO how to support modified since???
ff_ls_file = os.path.join(profile_directory, 'webappsstore.sqlite')
if not os.path.isfile(ff_ls_file):
- print "Cannot find localstorage DB %s" % ff_ls_file
+ print("Cannot find localstorage DB %s" % ff_ls_file)
else:
conn = sqlite3.connect(ff_ls_file)
with conn:
@@ -40,7 +40,7 @@
def get_cookies(profile_directory, mod_since):
cookie_db = os.path.join(profile_directory, 'cookies.sqlite')
if not os.path.isfile(cookie_db):
- print "cannot find cookie.db", cookie_db
+ print("cannot find cookie.db", cookie_db)
else:
conn = sqlite3.connect(cookie_db)
with conn:
--- ./automation/Commands/utils/lso.py (original)
+++ ./automation/Commands/utils/lso.py (refactored)
@@ -36,26 +36,26 @@
except (KeyboardInterrupt, SystemExit):
raise
except Exception as e:
- print "Exception reading", lso_file
- print e
+ print("Exception reading", lso_file)
+ print(e)
pass
return flash_cookies
def parse_flash_cookies(lso_file):
lso_dict = sol.load(lso_file)
flash_cookies = list()
- for k, v in lso_dict.iteritems():
+ for k, v in lso_dict.items():
flash_cookie = FlashCookie()
flash_cookie.local_path = lso_file.split("#SharedObjects/")[1]
flash_cookie.filename = os.path.basename(lso_file)
flash_cookie.domain = lso_file.split("#SharedObjects/")[1].split("/")[1]
- flash_cookie.key = unicode(k)
+ flash_cookie.key = str(k)
try:
- flash_cookie.content = unicode(v)
+ flash_cookie.content = str(v)
except UnicodeDecodeError:
# obj is byte string
ascii_text = str(v).encode('string_escape')
- flash_cookie.content = unicode(ascii_text)
+ flash_cookie.content = str(ascii_text)
flash_cookies.append(flash_cookie)
return flash_cookies
--- ./automation/Commands/utils/webdriver_extensions.py (original)
+++ ./automation/Commands/utils/webdriver_extensions.py (refactored)
@@ -5,12 +5,12 @@
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import ElementNotVisibleException
from selenium.common.exceptions import NoSuchElementException
-from urlparse import urljoin
+from urllib.parse import urljoin
import random
import time
-import domain_utils as du
-import XPathUtil
+from . import domain_utils as du
+from . import XPathUtil
#### Basic functions
def scroll_down(driver):
@@ -37,9 +37,8 @@
def get_intra_links(webdriver, url):
ps1 = du.get_ps_plus_1(url)
- links = filter(lambda x: (x.get_attribute("href") and
- du.get_ps_plus_1(urljoin(url, x.get_attribute("href"))) == ps1),
- webdriver.find_elements_by_tag_name("a"))
+ links = [x for x in webdriver.find_elements_by_tag_name("a") if (x.get_attribute("href") and
+ du.get_ps_plus_1(urljoin(url, x.get_attribute("href"))) == ps1)]
return links
##### Search/Block Functions
@@ -67,7 +66,7 @@
#If we get here, search also fails in iframes
driver.switch_to_default_content()
- raise NoSuchElementException, "Element not found during wait_and_find"
+ raise NoSuchElementException("Element not found during wait_and_find")
def is_found(driver, locator_type, locator, timeout=3):
try:
--- ./automation/DataAggregator/DataAggregator.py (original)
+++ ./automation/DataAggregator/DataAggregator.py (refactored)
@@ -5,6 +5,7 @@
import sqlite3
import time
import os
+import collections
def DataAggregator(manager_params, status_queue, commit_batch_size=1000):
@@ -73,14 +74,14 @@
query is of form (template_string, arguments)
"""
if len(query) != 2:
- print "ERROR: Query is not the correct length"
+ print("ERROR: Query is not the correct length")
return
statement = query[0]
args = list(query[1])
for i in range(len(args)):
if type(args[i]) == str:
- args[i] = unicode(args[i], errors='ignore')
- elif callable(args[i]):
+ args[i] = str(args[i], errors='ignore')
+ elif isinstance(args[i], collections.Callable):
args[i] = str(args[i])
try:
if len(args) == 0:
--- ./automation/DeployBrowsers/configure_firefox.py (original)
+++ ./automation/DeployBrowsers/configure_firefox.py (refactored)
@@ -28,9 +28,9 @@
# Tracking Protection
if browser_params['tracking-protection']:
- print "ERROR: Tracking Protection doesn't seem to work in Firefox 41 with selenium."
- print " It does work in 42 Beta. This will be enabled once that lands in release."
- print " Press Ctrl+C to exit"
+ print("ERROR: Tracking Protection doesn't seem to work in Firefox 41 with selenium.")
+ print(" It does work in 42 Beta. This will be enabled once that lands in release.")
+ print(" Press Ctrl+C to exit")
sys.exit(1)
#fp.set_preference('privacy.trackingprotection.enabled', True)
--- ./automation/DeployBrowsers/deploy_browser.py (original)
+++ ./automation/DeployBrowsers/deploy_browser.py (refactored)
@@ -1,5 +1,5 @@
-import deploy_chrome
-import deploy_firefox
+from . import deploy_chrome
+from . import deploy_firefox
def deploy_browser(status_queue, browser_params, manager_params, crash_recovery):
""" receives a dictionary of browser parameters and passes it to the relevant constructor """
--- ./automation/DeployBrowsers/deploy_firefox.py (original)
+++ ./automation/DeployBrowsers/deploy_firefox.py (refactored)
@@ -1,6 +1,6 @@
from ..MPLogger import loggingclient
from ..Commands.profile_commands import load_profile
-import configure_firefox
+from . import configure_firefox
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from selenium import webdriver
--- ./automation/PostProcessing/Cookie.py (original)
+++ ./automation/PostProcessing/Cookie.py (refactored)
@@ -215,7 +215,7 @@
import string
try:
- from cPickle import dumps, loads
+ from pickle import dumps, loads
except ImportError:
from pickle import dumps, loads
@@ -314,7 +314,7 @@
'\375' : '\\375', '\376' : '\\376', '\377' : '\\377'
}
-_idmap = ''.join(chr(x) for x in xrange(256))
+_idmap = ''.join(chr(x) for x in range(256))
def _quote(str, LegalChars=_LegalChars,
idmap=_idmap, translate=string.translate):
@@ -327,7 +327,7 @@
if "" == translate(str, idmap, LegalChars):
return str
else:
- return '"' + _nulljoin( map(_Translator.get, str, str) ) + '"'
+ return '"' + _nulljoin( list(map(_Translator.get, str, str)) ) + '"'
# end _quote
@@ -503,7 +503,7 @@
# Now add any defined attributes
if attrs is None:
attrs = self._reserved
- items = self.items()
+ items = list(self.items())
items.sort()
for K,V in items:
if V == "": continue
@@ -599,7 +599,7 @@
def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
"""Return a string suitable for HTTP."""
result = []
- items = self.items()
+ items = list(self.items())
items.sort()
for K,V in items:
result.append( V.output(attrs, header) )
@@ -610,7 +610,7 @@
def __repr__(self):
L = []
- items = self.items()
+ items = list(self.items())
items.sort()
for K,V in items:
L.append( '%s=%s' % (K,repr(V.value) ) )
@@ -619,7 +619,7 @@
def js_output(self, attrs=None):
"""Return a string suitable for JavaScript."""
result = []
- items = self.items()
+ items = list(self.items())
items.sort()
for K,V in items:
result.append( V.js_output(attrs) )
@@ -636,7 +636,7 @@
self.__ParseString(rawdata)
else:
# self.update() wouldn't call our custom __setitem__
- for k, v in rawdata.items():
+ for k, v in list(rawdata.items()):
self[k] = v
return
# end load()
@@ -758,7 +758,7 @@
###########################################################
def _test():
- import doctest, Cookie
+ import doctest, http.cookies
return doctest.testmod(Cookie)
if __name__ == "__main__":
--- ./automation/PostProcessing/build_cookie_table.py (original)
+++ ./automation/PostProcessing/build_cookie_table.py (refactored)
@@ -1,4 +1,4 @@
-from urlparse import urlparse
+from urllib.parse import urlparse
from netlib.odict import ODictCaseless
import sqlite3
import json
@@ -7,7 +7,7 @@
# This should be the modified Cookie.py included
# the standard lib Cookie.py has many bugs
-import Cookie
+from . import Cookie
#Potential formats for expires timestamps
DATE_FORMATS = ['%a, %d-%b-%Y %H:%M:%S %Z','%a, %d %b %Y %H:%M:%S %Z',
@@ -21,12 +21,12 @@
Ignore errors if both of these don't work
"""
try:
- encoded = unicode(string, 'UTF-8')
+ encoded = str(string, 'UTF-8')
except UnicodeDecodeError:
try:
- encoded = unicode(string, 'ISO-8859-1')
+ encoded = str(string, 'ISO-8859-1')
except UnicodeDecodeError:
- encoded = unicode(string, 'UTF-8', errors='ignore')
+ encoded = str(string, 'UTF-8', errors='ignore')
return encoded
def select_date_format(date_string):
@@ -112,20 +112,20 @@
queries = list()
attrs = ()
try:
- if type(cookie_string) == unicode:
+ if type(cookie_string) == str:
cookie_string = cookie_string.encode('utf-8')
cookie = Cookie.BaseCookie(cookie_string) # requires str type
- for key in cookie.keys():
+ for key in list(cookie.keys()):
name = encode_to_unicode(key)
value = encode_to_unicode(cookie[key].coded_value)
if response_cookie:
attrs = parse_cookie_attributes(cookie, key, url)
query = (name, value)+attrs
queries.append(query)
- except Cookie.CookieError, e:
- if verbose: print "[ERROR] - Malformed cookie string"
- if verbose: print "--------- " + cookie_string
- if verbose: print e
+ except Cookie.CookieError as e:
+ if verbose: print("[ERROR] - Malformed cookie string")
+ if verbose: print("--------- " + cookie_string)
+ if verbose: print(e)
pass
return queries
@@ -183,10 +183,10 @@
if commit % 10000 == 0 and commit != 0 and commit != last_commit:
last_commit = commit
con.commit()
- if verbose: print str(commit) + " Cookies Processed"
+ if verbose: print(str(commit) + " Cookies Processed")
row = cur1.fetchone()
con.commit()
- print "Processing HTTP Request Cookies Complete"
+ print("Processing HTTP Request Cookies Complete")
# Parse http response cookies
commit = 0
@@ -214,10 +214,10 @@
if commit % 10000 == 0 and commit != 0 and commit != last_commit:
last_commit = commit
con.commit()
- if verbose: print str(commit) + " Cookies Processed"
+ if verbose: print(str(commit) + " Cookies Processed")
row = cur1.fetchone()
con.commit()
- print "Processing HTTP Response Cookies Complete"
+ print("Processing HTTP Response Cookies Complete")
con.close()
if __name__=='__main__':
--- ./automation/PostProcessing/post_processing.py (original)
+++ ./automation/PostProcessing/post_processing.py (refactored)
@@ -1,10 +1,10 @@
import os
-from build_cookie_table import build_http_cookie_table
+from .build_cookie_table import build_http_cookie_table
def run(manager_params):
""" Post-processing tasks to run after TaskManager finishes """
- print "Starting post-processing tasks..."
+ print("Starting post-processing tasks...")
db_path = manager_params['database_name']
build_http_cookie_table(db_path) # Parse HTTP Cookies
--- ./automation/Proxy/MITMProxy.py (original)
+++ ./automation/Proxy/MITMProxy.py (refactored)
@@ -1,9 +1,9 @@
from ..SocketInterface import clientsocket
from ..MPLogger import loggingclient
-import mitm_commands
+from . import mitm_commands
from libmproxy import controller
-import Queue
+import queue
import sys
import traceback
@@ -51,7 +51,7 @@
msg = q.get(timeout=timeout)
controller.Master.handle(self, *msg)
return True
- except Queue.Empty:
+ except queue.Empty:
return False
def tick(self, q, timeout=0.01):
@@ -73,7 +73,7 @@
try:
controller.Master.run(self)
except KeyboardInterrupt:
- print 'KeyboardInterrupt received. Shutting down'
+ print('KeyboardInterrupt received. Shutting down')
self.shutdown()
sys.exit(0)
except Exception:
--- ./automation/Proxy/deploy_mitm_proxy.py (original)
+++ ./automation/Proxy/deploy_mitm_proxy.py (refactored)
@@ -1,11 +1,11 @@
from ..MPLogger import loggingclient
-import MITMProxy
+from . import MITMProxy
from libmproxy import proxy
from libmproxy.proxy.server import ProxyServer
import threading
import socket
-import Queue
+import queue
import os
@@ -17,7 +17,7 @@
<status_queue> a Queue to report proxy status back to TaskManager
"""
logger = loggingclient(*manager_params['logger_address'])
- proxy_site_queue = Queue.Queue() # queue for crawler to communicate with proxy
+ proxy_site_queue = queue.Queue() # queue for crawler to communicate with proxy
# gets local port from one of the free ports
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
--- ./automation/Proxy/mitm_commands.py (original)
+++ ./automation/Proxy/mitm_commands.py (refactored)
@@ -1,7 +1,7 @@
# This module parses MITM Proxy requests/responses into (command, data pairs)
# This should mean that the MITMProxy code should simply pass the messages + its own data to this module
-from urlparse import urlparse
+from urllib.parse import urlparse
import datetime
import mmh3
import json
@@ -15,12 +15,12 @@
won't always be correct.
"""
try:
- msg = unicode(msg, 'utf8')
+ msg = str(msg, 'utf8')
except UnicodeDecodeError:
try:
- msg = unicode(msg, 'ISO-8859-1')
+ msg = str(msg, 'ISO-8859-1')
except UnicodeDecodeError:
- msg = unicode(msg, 'utf8', 'ignore')
+ msg = str(msg, 'utf8', 'ignore')
return msg
--- ./test/conftest.py (original)
+++ ./test/conftest.py (refactored)
@@ -1,24 +1,24 @@
import pytest
-import utilities
-import commands
+from . import utilities
+import subprocess
def create_xpi():
"""Creates a new xpi using jpm."""
cmd_cd = "cd ../automation/Extension/firefox/"
cmd_jpm = "jpm xpi"
- print commands.getstatusoutput("%s && %s" % (cmd_cd, cmd_jpm))
+ print(subprocess.getstatusoutput("%s && %s" % (cmd_cd, cmd_jpm)))
@pytest.fixture(scope="session", autouse=True)
def prepare_test_setup(request):
"""Run an HTTP server during the tests."""
create_xpi()
- print "\nStarting local_http_server"
+ print("\nStarting local_http_server")
server, server_thread = utilities.start_server()
def local_http_server_stop():
- print "\nClosing server thread..."
+ print("\nClosing server thread...")
server.shutdown()
server_thread.join()
--- ./test/expected.py (original)
+++ ./test/expected.py (refactored)
@@ -1,5 +1,5 @@
""" Contains lists of expected data and or rows for tests """
-from utilities import BASE_TEST_URL, BASE_TEST_URL_DOMAIN, BASE_TEST_URL_NOPATH
+from .utilities import BASE_TEST_URL, BASE_TEST_URL_DOMAIN, BASE_TEST_URL_NOPATH
# Navigator and Screen properties
properties = {
@@ -24,44 +24,44 @@
"window.screen.colorDepth"}
# Canvas Fingerprinting DB calls and property sets
-CANVAS_TEST_URL = u"%s/canvas_fingerprinting.html" % BASE_TEST_URL
+CANVAS_TEST_URL = "%s/canvas_fingerprinting.html" % BASE_TEST_URL
canvas = {(CANVAS_TEST_URL,
- u"HTMLCanvasElement.getContext", u"call", u"", 0, u"2d"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.textBaseline",
- u"set", u"top", None, None),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.font", u"set",
- u"14px 'Arial'", None, None),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.textBaseline",
- u"set", u"alphabetic", None, None),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillStyle",
- u"set", u"#f60", None, None),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillRect",
- u"call", u"", 0, u"125"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillRect",
- u"call", u"", 1, u"1"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillRect",
- u"call", u"", 2, u"62"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillRect",
- u"call", u"", 3, u"20"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillStyle",
- u"set", u"#069", None, None),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillText",
- u"call", u"", 0, u"BrowserLeaks,com <canvas> 1.0"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillText",
- u"call", u"", 1, u"2"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillText",
- u"call", u"", 2, u"15"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillStyle",
- u"set", u"rgba(102, 204, 0, 0.7)", None, None),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillText",
- u"call", u"", 0, u"BrowserLeaks,com <canvas> 1.0"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillText",
- u"call", u"", 1, u"4"),
- (CANVAS_TEST_URL, u"CanvasRenderingContext2D.fillText",
- u"call", u"", 2, u"17"),
- (CANVAS_TEST_URL, u"HTMLCanvasElement.toDataURL", u"call",
- u"", None, None)
+ "HTMLCanvasElement.getContext", "call", "", 0, "2d"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.textBaseline",
+ "set", "top", None, None),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.font", "set",
+ "14px 'Arial'", None, None),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.textBaseline",
+ "set", "alphabetic", None, None),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillStyle",
+ "set", "#f60", None, None),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillRect",
+ "call", "", 0, "125"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillRect",
+ "call", "", 1, "1"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillRect",
+ "call", "", 2, "62"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillRect",
+ "call", "", 3, "20"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillStyle",
+ "set", "#069", None, None),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillText",
+ "call", "", 0, "BrowserLeaks,com <canvas> 1.0"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillText",
+ "call", "", 1, "2"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillText",
+ "call", "", 2, "15"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillStyle",
+ "set", "rgba(102, 204, 0, 0.7)", None, None),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillText",
+ "call", "", 0, "BrowserLeaks,com <canvas> 1.0"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillText",
+ "call", "", 1, "4"),
+ (CANVAS_TEST_URL, "CanvasRenderingContext2D.fillText",
+ "call", "", 2, "17"),
+ (CANVAS_TEST_URL, "HTMLCanvasElement.toDataURL", "call",
+ "", None, None)
}
adblockplus = {
@@ -69,34 +69,34 @@
# favicon request is made to URL without a path
"%s/favicon.ico" % BASE_TEST_URL_NOPATH}
-js_cookie = (u'%s/js_cookie.html' % BASE_TEST_URL,
- u'%s' % BASE_TEST_URL_DOMAIN,
- u'test_cookie',
- u'Test-0123456789',
- u'%s' % BASE_TEST_URL_DOMAIN,
- u'/')
+js_cookie = ('%s/js_cookie.html' % BASE_TEST_URL,
+ '%s' % BASE_TEST_URL_DOMAIN,
+ 'test_cookie',
+ 'Test-0123456789',
+ '%s' % BASE_TEST_URL_DOMAIN,
+ '/')
-lso_content = [u'%s/lso/setlso.html?lso_test_key=test_key&lso_test_value=REPLACEME' % BASE_TEST_URL, # noqa
- u'localtest.me',
- u'FlashCookie.sol',
- u'localtest.me/FlashCookie.sol',
- u'test_key',
- u'REPLACEME']
+lso_content = ['%s/lso/setlso.html?lso_test_key=test_key&lso_test_value=REPLACEME' % BASE_TEST_URL, # noqa
+ 'localtest.me',
+ 'FlashCookie.sol',
+ 'localtest.me/FlashCookie.sol',
+ 'test_key',
+ 'REPLACEME']
-WEBRTC_TEST_URL = u"%s/webrtc_localip.html" % BASE_TEST_URL
+WEBRTC_TEST_URL = "%s/webrtc_localip.html" % BASE_TEST_URL
-webrtc_calls = ((WEBRTC_TEST_URL, u'RTCPeerConnection.createDataChannel',
- u'call', u'', 0, u''),
- (WEBRTC_TEST_URL, u'RTCPeerConnection.createDataChannel',
- u'call', u'', 1, u'{"reliable":false}'),
- (WEBRTC_TEST_URL, u'RTCPeerConnection.onicecandidate',
- u'set', u'FUNCTION', None, None),
- (WEBRTC_TEST_URL, u'RTCPeerConnection.createDataChannel',
- u'call', u'', 0, u''),
- (WEBRTC_TEST_URL, u'RTCPeerConnection.createOffer',
- u'call', u'', 0, u'FUNCTION'),
- (WEBRTC_TEST_URL, u'RTCPeerConnection.createOffer',
- u'call', u'', 1, u'FUNCTION'))
+webrtc_calls = ((WEBRTC_TEST_URL, 'RTCPeerConnection.createDataChannel',
+ 'call', '', 0, ''),
+ (WEBRTC_TEST_URL, 'RTCPeerConnection.createDataChannel',
+ 'call', '', 1, '{"reliable":false}'),
+ (WEBRTC_TEST_URL, 'RTCPeerConnection.onicecandidate',
+ 'set', 'FUNCTION', None, None),
+ (WEBRTC_TEST_URL, 'RTCPeerConnection.createDataChannel',
+ 'call', '', 0, ''),
+ (WEBRTC_TEST_URL, 'RTCPeerConnection.createOffer',
+ 'call', '', 0, 'FUNCTION'),
+ (WEBRTC_TEST_URL, 'RTCPeerConnection.createOffer',
+ 'call', '', 1, 'FUNCTION'))
# we expect these strings to be present in the WebRTC SDP
webrtc_sdp_offer_strings = ("a=ice-options",
@@ -115,38 +115,38 @@
"a=ssrc:",
"cname:")
-SET_PROP_TEST_PAGE = u'%s/set_property/set_property.js' % BASE_TEST_URL
+SET_PROP_TEST_PAGE = '%s/set_property/set_property.js' % BASE_TEST_URL
set_property = [(SET_PROP_TEST_PAGE,
- u'5', u'3',
- u'set_window_name@%s:5:3\n'
+ '5', '3',
+ 'set_window_name@%s:5:3\n'
'@%s:8:1\n' % (SET_PROP_TEST_PAGE, SET_PROP_TEST_PAGE),
- u'window.HTMLFormElement.action',
- u'set', u'TEST-ACTION', None, None)]
+ 'window.HTMLFormElement.action',
+ 'set', 'TEST-ACTION', None, None)]
page_links = {
- (u'http://localtest.me:8000/test_pages/simple_a.html', u'http://localtest.me:8000/test_pages/simple_c.html'),
- (u'http://localtest.me:8000/test_pages/simple_a.html', u'http://localtest.me:8000/test_pages/simple_d.html'),
- (u'http://localtest.me:8000/test_pages/simple_a.html', u'http://example.com/test.html?localtest.me'),
+ ('http://localtest.me:8000/test_pages/simple_a.html', 'http://localtest.me:8000/test_pages/simple_c.html'),
+ ('http://localtest.me:8000/test_pages/simple_a.html', 'http://localtest.me:8000/test_pages/simple_d.html'),
+ ('http://localtest.me:8000/test_pages/simple_a.html', 'http://example.com/test.html?localtest.me'),
}
# AudioContext and AudioNode symbols we expect from our test script
audio = {
- u"AudioContext.createOscillator",
- u"AudioContext.createAnalyser",
- u"AudioContext.createGain",
- u"AudioContext.createScriptProcessor",
- u"GainNode.gain",
- u"OscillatorNode.type",
- u"OscillatorNode.connect",
- u"AnalyserNode.connect",
- u"ScriptProcessorNode.connect",
- u"AudioContext.destination",
- u"GainNode.connect",
- u"ScriptProcessorNode.onaudioprocess",
- u"OscillatorNode.start",
- u"AnalyserNode.frequencyBinCount",
- u"AnalyserNode.getFloatFrequencyData",
- u"AnalyserNode.disconnect",
- u"ScriptProcessorNode.disconnect",
- u"GainNode.disconnect",
- u"OscillatorNode.stop"}
+ "AudioContext.createOscillator",
+ "AudioContext.createAnalyser",
+ "AudioContext.createGain",
+ "AudioContext.createScriptProcessor",
+ "GainNode.gain",
+ "OscillatorNode.type",
+ "OscillatorNode.connect",
+ "AnalyserNode.connect",
+ "ScriptProcessorNode.connect",
+ "AudioContext.destination",
+ "GainNode.connect",
+ "ScriptProcessorNode.onaudioprocess",
+ "OscillatorNode.start",
+ "AnalyserNode.frequencyBinCount",
+ "AnalyserNode.getFloatFrequencyData",
+ "AnalyserNode.disconnect",
+ "ScriptProcessorNode.disconnect",
+ "GainNode.disconnect",
+ "OscillatorNode.stop"}
--- ./test/manual_test.py (original)
+++ ./test/manual_test.py (refactored)
@@ -1,4 +1,4 @@
-from utilities import BASE_TEST_URL, start_server
+from .utilities import BASE_TEST_URL, start_server
import subprocess
@@ -26,10 +26,10 @@
try:
# http://stackoverflow.com/a/4417735/3104416
for line in run_cmd("%s && %s" % (cmd_cd, cmd_jpm)):
- print bcolors.OKGREEN, line, bcolors.ENDC,
+ print(bcolors.OKGREEN, line, bcolors.ENDC, end=' ')
except KeyboardInterrupt:
- print "Keyboard Interrupt detected, shutting down..."
- print "\nClosing server thread..."
+ print("Keyboard Interrupt detected, shutting down...")
+ print("\nClosing server thread...")
server.shutdown()
thread.join()
--- ./test/openwpmtest.py (original)
+++ ./test/openwpmtest.py (refactored)
@@ -1,7 +1,7 @@
-import utilities
+from . import utilities
import pytest
from time import sleep
-import commands
+import subprocess
from ..automation import TaskManager
@@ -20,7 +20,7 @@
def is_installed(self, pkg_name):
"""Check if a Linux package is installed."""
cmd = 'which %s' % pkg_name
- status, _ = commands.getstatusoutput(cmd)
+ status, _ = subprocess.getstatusoutput(cmd)
return False if status else True
def assert_is_installed(self, pkg):
--- ./test/test_adblock_plus.py (original)
+++ ./test/test_adblock_plus.py (refactored)
@@ -1,12 +1,12 @@
-from urlparse import urlparse
+from urllib.parse import urlparse
import pytest
import os
from ..automation import TaskManager
from ..automation.Errors import BrowserConfigError
from ..automation.platform_utils import fetch_adblockplus_list
-import utilities
-import expected
+from . import utilities
+from . import expected
psl = utilities.get_psl()
--- ./test/test_crawl.py (original)
+++ ./test/test_crawl.py (refactored)
@@ -1,10 +1,10 @@
-from urlparse import urlparse
+from urllib.parse import urlparse
import tarfile
import pytest
import os
from ..automation import TaskManager
-import utilities
+from . import utilities
TEST_SITES = [
'http://google.com',
--- ./test/test_custom_function_command.py (original)
+++ ./test/test_custom_function_command.py (refactored)
@@ -1,8 +1,8 @@
import pytest # noqa
import os
-import expected
-import utilities
+from . import expected
+from . import utilities
from ..automation import CommandSequence
from ..automation import TaskManager
@@ -31,7 +31,7 @@
manager_params = kwargs['manager_params']
link_elements = driver.find_elements_by_tag_name('a')
link_urls = [element.get_attribute("href") for element in link_elements]
- link_urls = filter(lambda x: x.startswith(scheme+'://'), link_urls)
+ link_urls = [x for x in link_urls if x.startswith(scheme+'://')]
current_url = driver.current_url
sock = clientsocket()
--- ./test/test_env.py (original)
+++ ./test/test_env.py (refactored)
@@ -1,5 +1,5 @@
import pytest
-from openwpmtest import OpenWPMTest
+from .openwpmtest import OpenWPMTest
class TestDependencies(OpenWPMTest):
--- ./test/test_extension.py (original)
+++ ./test/test_extension.py (refactored)
@@ -1,8 +1,8 @@
import pytest # NOQA
import os
-import utilities
-import expected
-from openwpmtest import OpenWPMTest
+from . import utilities
+from . import expected
+from .openwpmtest import OpenWPMTest
from ..automation import TaskManager
# TODO: add test for setter instrumentation
@@ -89,7 +89,7 @@
observed_rows = set()
for item in rows:
if item[1] == "RTCPeerConnection.setLocalDescription":
- assert item[2:5] == (u'call', u'', 0)
+ assert item[2:5] == ('call', '', 0)
sdp_offer = item[5]
self.check_webrtc_sdp_offer(sdp_offer)
else:
--- ./test/test_simple_commands.py (original)
+++ ./test/test_simple_commands.py (refactored)
@@ -2,7 +2,7 @@
from PIL import Image
import os
-import utilities
+from . import utilities
from ..automation import CommandSequence
from ..automation import TaskManager
--- ./test/test_storage_vectors.py (original)
+++ ./test/test_storage_vectors.py (refactored)
@@ -1,33 +1,33 @@
import pytest
import time
import os
-import utilities
+from . import utilities
from ..automation import CommandSequence
from ..automation import TaskManager
expected_lso_content_a = [
1, # visit id
- u'localtest.me',
- u'FlashCookie.sol',
- u'localtest.me/FlashCookie.sol',
- u'test_key',
- u'REPLACEME']
+ 'localtest.me',
+ 'FlashCookie.sol',
+ 'localtest.me/FlashCookie.sol',
+ 'test_key',
+ 'REPLACEME']
expected_lso_content_b = [
2, # visit id
- u'localtest.me',
- u'FlashCookie.sol',
- u'localtest.me/FlashCookie.sol',
- u'test_key',
- u'REPLACEME']
+ 'localtest.me',
+ 'FlashCookie.sol',
+ 'localtest.me/FlashCookie.sol',
+ 'test_key',
+ 'REPLACEME']
expected_js_cookie = (
1, # visit id
- u'%s' % utilities.BASE_TEST_URL_DOMAIN,
- u'test_cookie',
- u'Test-0123456789',
- u'%s' % utilities.BASE_TEST_URL_DOMAIN,
- u'/')
+ '%s' % utilities.BASE_TEST_URL_DOMAIN,
+ 'test_cookie',
+ 'Test-0123456789',
+ '%s' % utilities.BASE_TEST_URL_DOMAIN,
+ '/')
class TestStorageVectors():
""" Runs some basic tests to check that the saving of
--- ./test/utilities.py (original)
+++ ./test/utilities.py (refactored)
@@ -1,6 +1,6 @@
from publicsuffix import PublicSuffixList, fetch
-import SimpleHTTPServer
-import SocketServer
+import http.server
+import socketserver
import threading
import codecs
import os
@@ -13,7 +13,7 @@
LOCAL_WEBSERVER_PORT)
BASE_TEST_URL = "%s/test_pages" % BASE_TEST_URL_NOPATH
-class MyTCPServer(SocketServer.TCPServer):
+class MyTCPServer(socketserver.TCPServer):
"""Subclass TCPServer to be able to reuse the same port (Errno 98)."""
allow_reuse_address = True
@@ -25,13 +25,13 @@
don't fire on `file://*`. Instead, point test code to
`http://localtest.me:8000/test_pages/...`
"""
- print "Starting HTTP Server in a separate thread"
- Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
+ print("Starting HTTP Server in a separate thread")
+ Handler = http.server.SimpleHTTPRequestHandler
server = MyTCPServer(("localhost", LOCAL_WEBSERVER_PORT), Handler)
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
- print "...serving at port", LOCAL_WEBSERVER_PORT
+ print("...serving at port", LOCAL_WEBSERVER_PORT)
return server, thread
@@ -46,7 +46,7 @@
Grabs an updated public suffix list.
"""
if not os.path.isfile(PSL_CACHE_LOC):
- print "%s does not exist, downloading a copy." % PSL_CACHE_LOC
+ print("%s does not exist, downloading a copy." % PSL_CACHE_LOC)
psl_file = fetch()
with codecs.open(PSL_CACHE_LOC, 'w', encoding='utf8') as f:
f.write(psl_file.read())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.