Skip to content

Instantly share code, notes, and snippets.

@Agmagor
Last active August 29, 2015 14:04
Show Gist options
Save Agmagor/dbe542ce4df63b49222a to your computer and use it in GitHub Desktop.
--- ./tldextract/tldextract.py (original)
+++ ./tldextract/tldextract.py (refactored)
@@ -19,10 +19,10 @@
'forums.bbc'
"""
-from __future__ import with_statement
+
try:
- import cPickle as pickle
+ import pickle as pickle
except ImportError:
import pickle
import codecs
@@ -52,19 +52,19 @@
import socket
try:
- string_types = basestring
+ string_types = str
except NameError:
string_types = str
try: # pragma: no cover
# Python 2
- from urllib2 import urlopen
- from urlparse import scheme_chars
+ from urllib.request import urlopen
+ from urllib.parse import scheme_chars
except ImportError: # pragma: no cover
# Python 3
from urllib.request import urlopen
from urllib.parse import scheme_chars
- unicode = str
+ str = str
LOG = logging.getLogger("tldextract")
@@ -102,15 +102,15 @@
def _asdict(self):
'Return a new dict which maps field names to their values'
- base_zip = zip(self._fields, self)
+ base_zip = list(zip(self._fields, self))
zipped = base_zip + [('tld', self.tld)]
return dict(zipped)
def _replace(_self, **kwds):
'Return a new ExtractResult object replacing specified fields with new values'
- result = _self._make(map(kwds.pop, ('subdomain', 'domain', 'suffix'), _self))
+ result = _self._make(list(map(kwds.pop, ('subdomain', 'domain', 'suffix'), _self)))
if kwds:
- raise ValueError('Got unexpected field names: %r' % kwds.keys())
+ raise ValueError('Got unexpected field names: %r' % list(kwds.keys()))
return result
def __getnewargs__(self):
@@ -317,14 +317,14 @@
return _decode_utf8(s)
LOG.error('No Public Suffix List found. Consider using a mirror or constructing your TLDExtract with `fetch=False`.')
- return u''
+ return ''
def _decode_utf8(s):
""" Decode from utf8 to Python unicode string.
The suffix list, wherever its origin, should be UTF-8 encoded.
"""
- return unicode(s, 'utf-8')
+ return str(s, 'utf-8')
class _PublicSuffixListTLDExtractor(object):
def __init__(self, tlds):
@@ -361,7 +361,7 @@
parser.add_argument('--version', action='version', version='%(prog)s ' + distribution.version)
parser.add_argument('input', metavar='fqdn|url',
- type=unicode, nargs='*', help='fqdn or url')
+ type=str, nargs='*', help='fqdn or url')
parser.add_argument('-u', '--update', default=False, action='store_true', help='force fetch the latest TLD definitions')
parser.add_argument('-c', '--cache_file', help='use an alternate TLD definition file')
@@ -378,7 +378,7 @@
exit(1)
for i in args.input:
- print(' '.join(extract(i)))
+ print((' '.join(extract(i))))
if __name__ == "__main__":
main()
RefactoringTool: Refactored ./tldextract/tests/all.py
--- ./tldextract/tests/all.py (original)
+++ ./tldextract/tests/all.py (refactored)
@@ -113,7 +113,7 @@
self.assertExtract('216.22', 'project', 'coop', 'http://216.22.project.coop/')
def test_punycode(self):
- self.assertExtract('', u'россия', u'рф', 'http://xn--h1alffa9f.xn--p1ai')
+ self.assertExtract('', 'россия', 'рф', 'http://xn--h1alffa9f.xn--p1ai')
def test_empty(self):
self.assertExtract('', '', '', 'http://')
RefactoringTool: Refactored ./tldextract_app/handlers.py
--- ./tldextract_app/handlers.py (original)
+++ ./tldextract_app/handlers.py (refactored)
@@ -1,4 +1,4 @@
-from cStringIO import StringIO
+from io import StringIO
import tldextract
import web
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment