Skip to content

Instantly share code, notes, and snippets.

@wickman
Created May 13, 2014 17:08
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save wickman/8d34a7c578f8c5be44bf to your computer and use it in GitHub Desktop.
python goal backend diff
diff --git a/3rdparty/python/BUILD b/3rdparty/python/BUILD
index 1188c3f..7427891 100644
--- a/3rdparty/python/BUILD
+++ b/3rdparty/python/BUILD
@@ -14,14 +14,14 @@
# limitations under the License.
# ==================================================================================================
-def make_dep(name, dependency_name=None):
+def make_dep(name, dependency_name=None, **kw):
dependency_name = dependency_name or name
python_library(
name=name,
- dependencies=[python_requirement(dependency_name)]
+ dependencies=[python_requirement(dependency_name, **kw)],
)
-make_dep('antlr-3.1.3', 'antlr_python_runtime==3.1.3')
+make_dep('antlr-3.1.3', 'antlr_python_runtime==3.1.3', use_2to3=True)
make_dep('beautifulsoup', 'BeautifulSoup==3.2.0')
make_dep('boto', 'boto==2.0')
make_dep('bottle')
@@ -62,7 +62,7 @@ make_dep('pytz')
make_dep('pyyaml')
make_dep('rbtools', 'RBTools==0.4.2')
make_dep('redis', 'redis==2.4.9')
-make_dep('thrift')
+make_dep('thrift', use_2to3=True)
make_dep('tornado')
make_dep('twython', 'twython==1.4.3')
-make_dep('zookeeper', 'zc-zookeeper-static')
+make_dep('zookeeper', 'zc-zookeeper-static', compatibility=['<3'])
diff --git a/BUILD.twitter b/BUILD.twitter
index 8cd6132..72f392d 100644
--- a/BUILD.twitter
+++ b/BUILD.twitter
@@ -73,7 +73,7 @@ def buildinfo(context):
branchname = scm.branch_name or revision
now = time.localtime()
- with context.state('extra_buildinfo', {}) as state:
+ with context.state.map('extra_buildinfo') as state:
state.update({
'build.git.branchname': branchname,
'build.git.revision': revision,
@@ -144,11 +144,11 @@ class ThriftstoreCodeGen(Task):
def execute(self, targets):
def is_dml_target(target):
return isinstance(target, JavaThriftstoreDMLLibrary)
- if len(filter(is_dml_target, targets)) == 0:
+ if not any(filter(is_dml_target, targets)):
return
def get_exe_build_target():
- return pants('src/python/twitter/storage/management:thriftstore_codegen').resolve().next()
+ return next(pants('src/python/twitter/storage/management:thriftstore_codegen').resolve())
exe = ParseContext(BuildFile(ROOT_DIR, os.path.basename(__file__))).do_in_context(get_exe_build_target)
pex = os.path.join(ROOT_DIR, self.context.config.get('thriftstore-dml-gen', 'thriftstore-codegen'))
diff --git a/bootstrap_pants.py b/bootstrap_pants.py
index dba0cca..d5c286e 100644
--- a/bootstrap_pants.py
+++ b/bootstrap_pants.py
@@ -134,7 +134,7 @@ def import_commons(version, commons_base, python_base):
return tempdir
-def build_pants(filename='pants.pex', version=None, pypi=True, repos=[]):
+def build_pants(filename='pants.pex', version=None, pypi=True, repos=[], cache=None):
"""
A pants.pex binary bootstrapper.
"""
@@ -143,13 +143,14 @@ def build_pants(filename='pants.pex', version=None, pypi=True, repos=[]):
from twitter.common.python.obtainer import Obtainer
from twitter.common.python.pex_builder import PEXBuilder
from twitter.common.python.resolver import Resolver
- from twitter.common.python.translator import Translator
pb = PEXBuilder()
fetchers = [Fetcher(repos)]
if pypi:
fetchers.append(PyPIFetcher())
- resolver = Resolver(crawler=Crawler(), fetchers=fetchers)
+ if cache:
+ fetchers.append(Fetcher([cache]))
+ resolver = Resolver(cache=cache, crawler=Crawler(), fetchers=fetchers, install_cache=cache)
req = 'twitter.pants'
if version:
req += '==%s' % version
@@ -195,6 +196,8 @@ def main():
parser.add_option("--pypi", "--no-pypi", action="callback", callback=set_bool,
dest="pypi", default=True,
help="Whether or not to look for packages in PyPI.")
+ parser.add_option("--cache", dest="cache", default=None,
+ help="A local cache to use for eggs.")
parser.add_option("--commons-core-repo",
dest="commons_core_repo",
default="http://pypi.python.org/package/source/t/twitter.common-core/",
@@ -217,10 +220,12 @@ def main():
build_pants(options.pex_name,
version=options.pants_version,
pypi=options.pypi,
- repos=[options.commons_core_repo, options.commons_python_repo] + options.repos)
+ repos=[options.commons_core_repo, options.commons_python_repo] + options.repos,
+ cache=options.cache)
except:
- _, exc, _ = sys.exc_info()
- print('Failed to bootstrap pants: %s' % exc)
+ import traceback
+ print('Failed to bootstrap pants')
+ traceback.print_exc()
return 1
finally:
if td1: shutil.rmtree(td1)
diff --git a/pants.ini b/pants.ini
index fdca254..7a05a07 100644
--- a/pants.ini
+++ b/pants.ini
@@ -150,10 +150,7 @@ java: {
}
python: {
'gen': 'py:newstyle',
- 'deps': {
- 'service': ['3rdparty/python:thrift'],
- 'structs': ['3rdparty/python:thrift']
- }
+ 'egg_version': '0.8.0',
}
@@ -370,26 +367,35 @@ java_maximum_heap_size_mb: 1024
[python-setup]
artifact_cache: %(pants_workdir)s/python/artifacts
-bootstrap_cache: %(pants_workdir)s/python/pip
download_cache: %(pants_workdir)s/python/downloads
install_cache: %(pants_workdir)s/python/eggs
+interpreter_cache: %(pants_workdir)s/python/interpreters
+sdist_cache: %(pants_workdir)s/python/sdists
virtualenv_target: %(bootstrap_cache)s/virtualenv-1.7.1.2
virtualenv_urls: [
'http://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.7.1.2.tar.gz',
'https://svn.twitter.biz/science-binaries/home/third_party/python/virtualenv-1.7.1.2.tar.gz']
-bootstrap_packages: ['pip','mako']
+bootstrap_packages: ['mako']
platforms: [
'current',
'linux-x86_64']
+[python-lint]
+pylintrc: %(pants_supportdir)s/pylint/pylint.rc
+
+
[python-repos]
repos: [
'%(buildroot)s/3rdparty/python/',
- 'https://svn.twitter.biz/science-binaries/home/third_party/python/dist/',
- 'https://svn.twitter.biz/science-binaries/home/third_party/python/']
+ '/Users/wickman/clients/science-binaries/home/third_party/python/dist',
+ '/Users/wickman/clients/science-binaries/home/third_party/python/']
+
+# Online repos
+#'https://svn.twitter.biz/science-binaries/home/third_party/python/dist/',
+#'https://svn.twitter.biz/science-binaries/home/third_party/python/']
# If you want to enable external access, add your cheeseshop indices here:
# indices: ['pypi.python.org']
diff --git a/src/python/twitter/common/dirutil/__init__.py b/src/python/twitter/common/dirutil/__init__.py
index 2a10707..e5675fd 100644
--- a/src/python/twitter/common/dirutil/__init__.py
+++ b/src/python/twitter/common/dirutil/__init__.py
@@ -77,6 +77,15 @@ def safe_mkdtemp(cleaner=_mkdtemp_atexit_cleaner, **kw):
return td
+def register_rmtree(directory):
+ """
+ Register an existing directory to be cleaned up at process exit.
+ """
+ with _MKDTEMP_LOCK:
+ _mkdtemp_register_cleaner(_mkdtemp_atexit_cleaner)
+ _MKDTEMP_DIRS[os.getpid()].add(directory)
+
+
def safe_rmtree(directory):
"""
Delete a directory if it's present. If it's not present, no-op.
@@ -190,6 +199,16 @@ def chmod_plus_x(path):
os.chmod(path, path_mode)
+def chmod_plus_w(path):
+ """
+ Equivalent of unix `chmod +w path`
+ """
+ path_mode = os.stat(path).st_mode
+ path_mode &= int('777', 8)
+ path_mode |= stat.S_IWRITE
+ os.chmod(path, path_mode)
+
+
def touch(file, times=None):
"""
Equivalent of unix `touch path`.
diff --git a/src/python/twitter/common/dirutil/chroot.py b/src/python/twitter/common/dirutil/chroot.py
index f899d5c..201426e 100644
--- a/src/python/twitter/common/dirutil/chroot.py
+++ b/src/python/twitter/common/dirutil/chroot.py
@@ -26,6 +26,7 @@ import zipfile
from . import safe_mkdir
+
class Chroot(object):
"""
A chroot of files overlayed from one directory to another directory.
@@ -65,13 +66,14 @@ class Chroot(object):
"""
self.root = root
- def dup(self):
- td = tempfile.mkdtemp()
- new_chroot = Chroot(td)
- shutil.rmtree(td) # because copytree requires that td has not been created.
+ def clone(self, into=None):
+ into = into or tempfile.mkdtemp()
+ new_chroot = Chroot(into)
new_chroot.root = self.root
- new_chroot.filesets = copy.deepcopy(self.filesets)
- shutil.copytree(self.chroot, td)
+ for label, fileset in self.filesets.items():
+ for fn in fileset:
+ new_chroot.link(os.path.join(self.chroot, self.root or '', fn),
+ fn, label=label)
return new_chroot
def path(self):
@@ -94,10 +96,7 @@ class Chroot(object):
safe_mkdir(dirname)
def _rootjoin(self, path):
- if self.root is None:
- return path
- else:
- return os.path.join(self.root, path)
+ return os.path.join(self.root or '', path)
def copy(self, src, dst, label=None):
"""
@@ -111,8 +110,7 @@ class Chroot(object):
"""
self._tag(dst, label)
self._mkdir_for(dst)
- shutil.copyfile(self._rootjoin(src),
- os.path.join(self.chroot, dst))
+ shutil.copyfile(self._rootjoin(src), os.path.join(self.chroot, dst))
def link(self, src, dst, label=None):
"""
diff --git a/src/python/twitter/common/dirutil/fileset.py b/src/python/twitter/common/dirutil/fileset.py
index c74ffb7..51916ac 100644
--- a/src/python/twitter/common/dirutil/fileset.py
+++ b/src/python/twitter/common/dirutil/fileset.py
@@ -14,6 +14,7 @@
# limitations under the License.
# ==================================================================================================
+from functools import reduce
import fnmatch
import glob
import os
diff --git a/src/python/twitter/common/lang/__init__.py b/src/python/twitter/common/lang/__init__.py
index 3093932..dddd8c1 100644
--- a/src/python/twitter/common/lang/__init__.py
+++ b/src/python/twitter/common/lang/__init__.py
@@ -166,7 +166,7 @@ class InheritDocstringsMetaclass(type):
"""
def __new__(self, class_name, bases, namespace):
- for key, value in namespace.iteritems():
+ for key, value in namespace.items():
if callable(value) and not value.__doc__:
for parent in bases:
if hasattr(parent, key) and getattr(parent, key).__doc__:
diff --git a/src/python/twitter/common/python/base.py b/src/python/twitter/common/python/base.py
index 97d4953..20a70d1 100644
--- a/src/python/twitter/common/python/base.py
+++ b/src/python/twitter/common/python/base.py
@@ -5,8 +5,20 @@ from twitter.common.lang import Compatibility
from pkg_resources import Requirement
+REQUIRED_ATTRIBUTES = (
+ 'extras',
+ 'key',
+ 'project_name',
+ 'specs',
+)
+
+
+def quacks_like_req(req):
+ return all(hasattr(req, attr) for attr in REQUIRED_ATTRIBUTES)
+
+
def maybe_requirement(req):
- if isinstance(req, Requirement):
+ if isinstance(req, Requirement) or quacks_like_req(req):
return req
elif isinstance(req, Compatibility.string):
return Requirement.parse(req)
@@ -14,8 +26,10 @@ def maybe_requirement(req):
def maybe_requirement_list(reqs):
- if isinstance(reqs, (Compatibility.string, Requirement)):
+ try:
return [maybe_requirement(reqs)]
- elif isinstance(reqs, Iterable):
+ except ValueError:
+ pass
+ if isinstance(reqs, Iterable):
return [maybe_requirement(req) for req in reqs]
raise ValueError('Unknown requirement list %r' % (reqs,))
diff --git a/src/python/twitter/common/python/distiller.py b/src/python/twitter/common/python/distiller.py
index 42d54ab..728b73d 100644
--- a/src/python/twitter/common/python/distiller.py
+++ b/src/python/twitter/common/python/distiller.py
@@ -44,7 +44,7 @@ def __bootstrap__():
try:
from StringIO import StringIO
except:
- from io import ByteIO as StringIO
+ from io import BytesIO as StringIO
# open multiply-nested-zip
def nested_open(path, full_path=None, zf=None):
@@ -88,7 +88,7 @@ def __bootstrap__():
try:
fd, name = tempfile.mkstemp()
- with os.fdopen(fd, 'w') as fp:
+ with os.fdopen(fd, 'wb') as fp:
fp.write(content)
__file__ = name
@@ -141,7 +141,7 @@ class Distiller(object):
class InvalidDistribution(Exception): pass
- def __init__(self, distribution, debug=True):
+ def __init__(self, distribution, debug=False):
self._debug = debug
self._dist = distribution
assert isinstance(self._dist, Distribution)
@@ -194,7 +194,7 @@ class Distiller(object):
if not fn.endswith('.py'):
continue
- with open(fn) as fn_fp:
+ with open(fn, 'rb') as fn_fp:
try:
parsed_fn = ast.parse(fn_fp.read())
except SyntaxError as e:
@@ -250,7 +250,7 @@ class Distiller(object):
if fn.startswith(egg_info_dir) and not skip(fn):
rel_fn = os.path.relpath(fn, egg_info_dir)
if rel_fn == '.': continue
- with open(fn) as fp:
+ with open(fn, 'rb') as fp:
yield egg_info_name(rel_fn), fp.read()
# dump native_libs.txt
diff --git a/src/python/twitter/common/python/http/http.py b/src/python/twitter/common/python/http/http.py
index 5f40e9c..6850727 100644
--- a/src/python/twitter/common/python/http/http.py
+++ b/src/python/twitter/common/python/http/http.py
@@ -11,12 +11,14 @@ from twitter.common.quantity import Amount, Time
if Compatibility.PY3:
from http.client import parse_headers
+ from queue import Queue, Empty
import urllib.error as urllib_error
import urllib.parse as urlparse
import urllib.request as urllib_request
from urllib.request import addinfourl
else:
from httplib import HTTPMessage
+ from Queue import Queue, Empty
from urllib import addinfourl
import urllib2 as urllib_request
import urllib2 as urllib_error
@@ -35,7 +37,6 @@ def deadline(fn, *args, **kw):
Takes timeout= kwarg, which defaults to Amount(150, Time.MILLISECONDS)
"""
- from Queue import Queue, Empty
from threading import Thread
q = Queue(maxsize=1)
timeout = kw.pop('timeout', Amount(150, Time.MILLISECONDS))
@@ -132,7 +133,7 @@ class CachedWeb(object):
return self.age(url) > 0
def translate_url(self, url):
- return os.path.join(self._cache, hashlib.md5(url).hexdigest())
+ return os.path.join(self._cache, hashlib.md5(url.encode('utf8')).hexdigest())
def translate_all(self, url):
return ('%(tgt)s %(tgt)s.tmp %(tgt)s.headers %(tgt)s.headers.tmp' % {
@@ -175,11 +176,11 @@ class CachedWeb(object):
def decode_url(self, url):
target, _, headers, _ = self.translate_all(url)
- headers_fp = open(headers)
+ headers_fp = open(headers, 'rb')
code, = struct.unpack('>h', headers_fp.read(2))
def make_headers(fp):
return HTTPMessage(fp) if Compatibility.PY2 else parse_headers(fp)
- return addinfourl(open(target), make_headers(headers_fp), url, code)
+ return addinfourl(open(target, 'rb'), make_headers(headers_fp), url, code)
def clear_url(self, url):
for path in self.translate_all(url):
diff --git a/src/python/twitter/common/python/http/link.py b/src/python/twitter/common/python/http/link.py
index 66c0166..9105ca7 100644
--- a/src/python/twitter/common/python/http/link.py
+++ b/src/python/twitter/common/python/http/link.py
@@ -17,7 +17,8 @@ from pkg_resources import (
Distribution,
EGG_NAME,
parse_version,
- Requirement)
+ Requirement,
+ safe_name)
class Link(object):
@@ -149,7 +150,7 @@ class SourceLink(ExtendedLink):
@property
def name(self):
- return self._name
+ return safe_name(self._name)
@property
def raw_version(self):
@@ -199,7 +200,7 @@ class EggLink(ExtendedLink):
@property
def name(self):
- return self._name
+ return safe_name(self._name)
@property
def raw_version(self):
diff --git a/src/python/twitter/common/python/installer.py b/src/python/twitter/common/python/installer.py
index 1c0e4b4..a32ba39 100644
--- a/src/python/twitter/common/python/installer.py
+++ b/src/python/twitter/common/python/installer.py
@@ -8,6 +8,7 @@ import tempfile
from twitter.common.dirutil import safe_mkdtemp, safe_rmtree
+from .interpreter import PythonInterpreter
from .tracer import TRACER
from pkg_resources import Distribution, PathMetadata
@@ -47,7 +48,7 @@ exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
class InstallFailure(Exception): pass
- def __init__(self, source_dir, strict=True):
+ def __init__(self, source_dir, strict=True, interpreter=None):
"""
Create an installer from an unpacked source distribution in source_dir.
@@ -58,6 +59,7 @@ exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
self._install_tmp = safe_mkdtemp()
self._installed = None
self._strict = strict
+ self._interpreter = interpreter or PythonInterpreter.get()
fd, self._install_record = tempfile.mkstemp()
os.close(fd)
@@ -72,25 +74,19 @@ exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
def run(self):
if self._installed is not None:
return self._installed
- setuptools_path = None
- for item in sys.path:
- for dist in pkg_resources.find_distributions(item):
- if dist.project_name == 'distribute':
- setuptools_path = dist.location
- break
- if setuptools_path is None and self._strict:
+ if self._interpreter.distribute is None and self._strict:
self._installed = False
print('Failed to find distribute in sys.path!', file=sys.stderr)
return self._installed
setup_bootstrap = Installer.SETUP_BOOTSTRAP % {
- 'setuptools_path': setuptools_path or '',
+ 'setuptools_path': self._interpreter.distribute or '',
'setup_py': 'setup.py'
}
with TRACER.timed('Installing %s' % self._install_tmp, V=2):
po = subprocess.Popen(
- [sys.executable,
+ [self._interpreter.binary,
'-',
'install',
'--root=%s' % self._install_tmp,
@@ -111,6 +107,7 @@ exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
return self._installed
installed_files = []
+ egg_info = None
with open(self._install_record) as fp:
installed_files = fp.read().splitlines()
for line in installed_files:
diff --git a/src/python/twitter/common/python/interpreter.py b/src/python/twitter/common/python/interpreter.py
index 5433bc0..9259781 100644
--- a/src/python/twitter/common/python/interpreter.py
+++ b/src/python/twitter/common/python/interpreter.py
@@ -1,23 +1,28 @@
+"""
+twitter.common.python support for interpreter environments.
+
+PythonInterpreter
+ - binary
+ - identity (class - e.g. CPython/PyPy, major, minor, patch)
+ - distribute distribution
+"""
+
try:
from numbers import Integral
except ImportError:
Integral = (int, long)
+from collections import defaultdict
import os
import re
import subprocess
import sys
-from collections import defaultdict
-
-if not hasattr(__builtins__, 'any'):
- def any(genexpr):
- for expr in genexpr:
- if expr:
- return True
- return False
+from pkg_resources import Distribution, Requirement
+# Determine in the most platform-compatible way possible the identity of the interpreter
+# and whether or not it has a distribute egg.
ID_PY = b"""
import sys
@@ -26,16 +31,29 @@ if hasattr(sys, 'subversion'):
else:
subversion = 'CPython'
+setuptools_path = None
+try:
+ import pkg_resources
+ for item in sys.path:
+ for dist in pkg_resources.find_distributions(item):
+ if dist.project_name == 'distribute':
+ setuptools_path = dist.location
+except ImportError:
+ pass
+
print("%s %s %s %s" % (
subversion,
sys.version_info[0],
sys.version_info[1],
sys.version_info[2]))
+print(setuptools_path)
"""
class PythonIdentity(object):
- class InvalidError(Exception): pass
+ class Error(Exception): pass
+ class InvalidError(Error): pass
+ class UnknownRequirement(Error): pass
@staticmethod
def get():
@@ -45,18 +63,24 @@ class PythonIdentity(object):
subversion = 'CPython'
return PythonIdentity(subversion, sys.version_info[0], sys.version_info[1], sys.version_info[2])
- @staticmethod
- def from_id_string(id_string):
+ @classmethod
+ def from_id_string(cls, id_string):
values = id_string.split()
if len(values) != 4:
- raise PythonIdentity.InvalidError("Invalid id string: %s" % id_string)
- return PythonIdentity(str(values[0]), int(values[1]), int(values[2]), int(values[3]))
+ raise cls.InvalidError("Invalid id string: %s" % id_string)
+ return cls(str(values[0]), int(values[1]), int(values[2]), int(values[3]))
+
+ @classmethod
+ def from_path(cls, dirname):
+ interp, version = dirname.split('-')
+ major, minor, patch = version.split('.')
+ return cls(str(interp), int(major), int(minor), int(patch))
- def __init__(self, interpreter, major, minor, subminor):
- for var in (major, minor, subminor):
+ def __init__(self, interpreter, major, minor, patch):
+ for var in (major, minor, patch):
assert isinstance(var, Integral)
self._interpreter = interpreter
- self._version = (major, minor, subminor)
+ self._version = (major, minor, patch)
@property
def interpreter(self):
@@ -66,8 +90,39 @@ class PythonIdentity(object):
def version(self):
return self._version
+ @property
+ def requirement(self):
+ return self.distribution.as_requirement()
+
+ @property
+ def distribution(self):
+ return Distribution(project_name=self._interpreter, version='.'.join(map(str, self._version)))
+
+ @classmethod
+ def parse_requirement(cls, requirement, default_interpreter='CPython'):
+ if isinstance(requirement, Requirement):
+ return requirement
+ elif isinstance(requirement, str):
+ try:
+ requirement = Requirement.parse(requirement)
+ except ValueError:
+ try:
+ requirement = Requirement.parse('%s%s' % (default_interpreter, requirement))
+ except ValueError:
+ raise ValueError('Unknown requirement string: %s' % requirement)
+ return requirement
+ else:
+ raise ValueError('Unknown requirement type: %r' % (requirement,))
+
+ def matches(self, requirement):
+ """Given a Requirement, check if this interpreter matches."""
+ try:
+ requirement = self.parse_requirement(requirement, self._interpreter)
+ except ValueError as e:
+ raise self.UnknownRequirement(str(e))
+ return self.distribution in requirement
+
def hashbang(self):
- # TODO(wickman) Must be a better way.
return '#!/usr/bin/env python%s.%s' % self._version[0:2]
def __str__(self):
@@ -75,54 +130,55 @@ class PythonIdentity(object):
self._version[0], self._version[1], self._version[2])
def __repr__(self):
- return 'PythonIdentity("%s", %s, %s, %s)' % (
- self._interpreter,
- self._version[0], self._version[1], self._version[2])
+ return 'PythonIdentity(%r, %s, %s, %s)' % (
+ self._interpreter, self._version[0], self._version[1], self._version[2])
+
+ def __eq__(self, other):
+ return all([isinstance(other, PythonIdentity),
+ self.interpreter == other.interpreter,
+ self.version == other.version])
+
+ def __hash__(self):
+ return hash((self._interpreter, self._version))
class PythonInterpreter(object):
REGEXEN = (
- re.compile(r'python$'), re.compile(r'python[23].[0-9]$'),
- re.compile(r'pypy$'), re.compile(r'pypy-1.[0-9]$'),
+ # re.compile(r'jython$'), -- Leave this out until support is hashed out
+ re.compile(r'python$'),
+ re.compile(r'python[23].[0-9]$'),
+ re.compile(r'pypy$'),
+ re.compile(r'pypy-1.[0-9]$'),
)
- @staticmethod
- def get():
- return PythonInterpreter(sys.executable, interpreter=PythonIdentity.get())
-
- @staticmethod
- def all(paths=os.getenv('PATH').split(':')):
- return PythonInterpreter.filter(PythonInterpreter.find(paths))
-
- @staticmethod
- def from_binary(binary):
- po = subprocess.Popen([binary], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
- so, _ = po.communicate(ID_PY)
- return PythonInterpreter(binary, PythonIdentity.from_id_string(so.decode('utf8')))
-
- def __init__(self, binary=sys.executable, interpreter=None):
- """
- :binary => binary of python interpreter
- (if None, default to sys.executable)
- """
- self._binary = binary
-
- if binary == sys.executable and interpreter is None:
- self._identity = PythonIdentity.get()
- else:
- self._identity = interpreter or PythonInterpreter.from_binary(binary).identity()
-
- def binary(self):
- return self._binary
-
- def identity(self):
- return self._identity
-
- def __repr__(self):
- return 'PythonInterpreter(%r, %r)' % (self._binary, self._identity)
-
- @staticmethod
- def find(paths):
+ CACHE = {} # memoize executable => PythonInterpreter
+
+ class Error(Exception): pass
+ class IdentificationError(Error): pass
+
+ @classmethod
+ def get(cls):
+ return cls(sys.executable, interpreter=PythonIdentity.get())
+
+ @classmethod
+ def all(cls, paths=os.getenv('PATH').split(':')):
+ return cls.filter(PythonInterpreter.find(paths))
+
+ @classmethod
+ def from_binary(cls, binary):
+ if binary not in cls.CACHE:
+ po = subprocess.Popen([binary], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ so, _ = po.communicate(ID_PY)
+ output = so.decode('utf8').splitlines()
+ if len(output) != 2:
+ raise cls.IdentificationError("Could not establish identity of %s" % binary)
+ id_string, distribute_path = output
+ cls.CACHE[binary] = cls(binary, PythonIdentity.from_id_string(id_string),
+ distribute_path=distribute_path if distribute_path != "None" else None)
+ return cls.CACHE[binary]
+
+ @classmethod
+ def find(cls, paths):
"""
Given a list of files or directories, try to detect python interpreters amongst them.
Returns a list of PythonInterpreter objects.
@@ -137,15 +193,15 @@ class PythonInterpreter(object):
return []
for fn in expand_path(path):
basefile = os.path.basename(fn)
- if any(matcher.match(basefile) is not None for matcher in PythonInterpreter.REGEXEN):
+ if any(matcher.match(basefile) is not None for matcher in cls.REGEXEN):
try:
- pythons.append(PythonInterpreter.from_binary(fn))
+ pythons.append(cls.from_binary(fn))
except:
continue
return pythons
- @staticmethod
- def filter(pythons):
+ @classmethod
+ def filter(cls, pythons):
"""
Given a map of python interpreters in the format provided by PythonInterpreter.find(),
filter out duplicate versions and versions we would prefer not to use.
@@ -159,18 +215,102 @@ class PythonInterpreter(object):
return (version[MAJOR] == 2 and version[MINOR] >= 6 or
version[MAJOR] == 3 and version[MINOR] >= 2)
- all_versions = set(interpreter.identity().version for interpreter in pythons)
+ all_versions = set(interpreter.identity.version for interpreter in pythons)
good_versions = filter(version_filter, all_versions)
for version in good_versions:
# For each candidate, use the latest version we find on the filesystem.
candidates = defaultdict(list)
for interp in pythons:
- if interp.identity().version == version:
- candidates[interp.identity().interpreter].append(interp)
+ if interp.identity.version == version:
+ candidates[interp.identity.interpreter].append(interp)
for interp_class in candidates:
candidates[interp_class].sort(
- key=lambda interp: os.path.getmtime(interp.binary()), reverse=True)
+ key=lambda interp: os.path.getmtime(interp.binary), reverse=True)
good.append(candidates[interp_class].pop(0))
return good
+
+ @classmethod
+ def sanitize_environment(cls):
+ # N.B. This is merely a hack because sysconfig.py on the default OS X
+ # installation of 2.6 is boneheaded.
+ os.unsetenv('MACOSX_DEPLOYMENT_TARGET')
+
+ @classmethod
+ def replace(cls, requirement):
+ self = cls.get()
+ if self.identity.matches(requirement):
+ return False
+ for pi in cls.all():
+ if pi.identity.matches(requirement):
+ break
+ else:
+ raise cls.InterpreterNotFound('Could not find interpreter matching filter!')
+ cls.sanitize_environment()
+ os.execv(pi.binary, [pi.binary] + sys.argv)
+
+ def __init__(self, binary=None, interpreter=None, distribute_path=None):
+ """
+ :binary => binary of python interpreter
+ (if None, default to sys.executable)
+ """
+ self._binary = binary or sys.executable
+ self._binary_stat = os.stat(self._binary)
+
+ if self._binary == sys.executable:
+ self._identity = interpreter or PythonIdentity.get()
+ self._distribute = distribute_path or self._find_distribute()
+ else:
+ self._identity = interpreter or PythonInterpreter.from_binary(self._binary).identity
+ self._distribute = distribute_path
+
+ def _find_distribute(self):
+ import pkg_resources
+ for item in sys.path:
+ for dist in pkg_resources.find_distributions(item):
+ if dist.project_name == 'distribute':
+ return dist.location
+
+ @property
+ def binary(self):
+ return self._binary
+
+ @property
+ def identity(self):
+ return self._identity
+
+ @property
+ def python(self):
+ # return the python version in the format of the 'python' key for distributions
+ # specifically, '2.6', '2.7', '3.2', etc.
+ return '%d.%d' % (self._identity.version[0:2])
+
+ @property
+ def version(self):
+ return self._identity.version
+
+ @property
+ def version_string(self):
+ return str(self._identity)
+
+ @property
+ def distribute(self):
+ return self._distribute
+
+ def __hash__(self):
+ return hash(self._binary_stat)
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return False
+ return self._binary_stat == other._binary_stat
+
+ def __lt__(self, other):
+ if not isinstance(other, self.__class__):
+ return False
+ return self.version < other.version
+
+ def __repr__(self):
+ return '%s(%r, %r, %r)' % (self.__class__.__name__, self._binary, self._identity,
+ self._distribute)
diff --git a/src/python/twitter/common/python/pex.py b/src/python/twitter/common/python/pex.py
index 5e33457..87604e0 100644
--- a/src/python/twitter/common/python/pex.py
+++ b/src/python/twitter/common/python/pex.py
@@ -44,12 +44,21 @@ class PEX(object):
except ImportError:
sys.stderr.write('Could not bootstrap coverage module!\n')
- def __init__(self, pex=sys.argv[0]):
+ @classmethod
+ def clean_environment(cls, forking=False):
+ os.unsetenv('MACOSX_DEPLOYMENT_TARGET')
+ if not forking:
+ for key in filter(lambda key: key.startswith('PEX_'), os.environ):
+ os.unsetenv(key)
+
+ # TODO(wickman) The interpreter should ideally be inferred from the PEX.
+ def __init__(self, pex=sys.argv[0], interpreter=None):
self._pex = PythonDirectoryWrapper.get(pex)
if not self._pex:
raise self.NotFound('Could not find PEX at %s!' % pex)
self._pex_info = PexInfo.from_pex(self._pex)
self._env = PEXEnvironment(self._pex.path(), self._pex_info)
+ self._interpreter = interpreter or PythonInterpreter.get()
@property
def info(self):
@@ -99,6 +108,7 @@ class PEX(object):
return scrubbed_sys_path, scrubbed_importer_cache
def execute(self, args=()):
+ self.clean_environment(forking=True)
entry_point = self.entry()
with mutable_sys():
sys.path, sys.path_importer_cache = self.minimum_path()
@@ -140,13 +150,15 @@ class PEX(object):
if 'PEX_PROFILE' not in os.environ:
runner(entry_point)
else:
- import pstats, cProfile
- profile_output = os.environ['PEX_PROFILE']
- safe_mkdir(os.path.dirname(profile_output))
- cProfile.runctx('runner(entry_point)', globals=globals(), locals=locals(),
+ import tempfile, pstats, cProfile
+ profile_output = tempfile.mktemp()
+ cProfile.runctx('runner(entry_point)',
+ globals=globals(),
+ locals=locals(),
filename=profile_output)
pstats.Stats(profile_output).sort_stats(
os.environ.get('PEX_PROFILE_SORT', 'cumulative')).print_stats(1000)
+ print('Profile written to %s' % profile_output)
@staticmethod
def execute_module(module_name):
@@ -170,8 +182,7 @@ class PEX(object):
['-m', 'pylint.lint']
args: Arguments to be passed to the application being invoked by the environment.
"""
- interpreter = PythonInterpreter(sys.executable)
- cmds = [interpreter.binary()]
+ cmds = [self._interpreter.binary]
cmds.append(self._pex.path())
cmds.extend(args)
return cmds
@@ -185,6 +196,7 @@ class PEX(object):
If false, return the Popen object of the invoked subprocess.
"""
import subprocess
+ self.clean_environment(forking=True)
cmdline = self.cmdline(args)
TRACER.log('PEX.run invoking %s' % ' '.join(cmdline))
diff --git a/src/python/twitter/common/python/pex_builder.py b/src/python/twitter/common/python/pex_builder.py
index 91ce52e..4a2cf77 100644
--- a/src/python/twitter/common/python/pex_builder.py
+++ b/src/python/twitter/common/python/pex_builder.py
@@ -22,13 +22,14 @@ import tempfile
from zipimport import zipimporter
from twitter.common.lang import Compatibility
-from twitter.common.dirutil import chmod_plus_x
+from twitter.common.dirutil import chmod_plus_x, safe_mkdir
from twitter.common.dirutil.chroot import Chroot
-from .interpreter import PythonIdentity
+from .interpreter import PythonIdentity, PythonInterpreter
from .marshaller import CodeMarshaller
from .pex_info import PexInfo
from .pex import PEX
+from .translator import dist_from_egg
from .util import DistributionHelper
from pkg_resources import (
@@ -74,15 +75,21 @@ class PEXBuilder(object):
DEPENDENCY_DIR = ".deps"
BOOTSTRAP_DIR = ".bootstrap"
- def __init__(self, path=None):
- self._chroot = Chroot(path or tempfile.mkdtemp())
- self._pex_info = PexInfo.default()
+ def __init__(self, path=None, interpreter=None, chroot=None, pex_info=None):
+ self._chroot = chroot or Chroot(path or tempfile.mkdtemp())
+ self._pex_info = pex_info or PexInfo.default()
self._frozen = False
+ self._interpreter = interpreter or PythonInterpreter.get()
self._logger = logging.getLogger(__name__)
def chroot(self):
return self._chroot
+ def clone(self, into=None):
+ chroot_clone = self._chroot.clone(into=into)
+ return PEXBuilder(chroot=chroot_clone, interpreter=self._interpreter,
+ pex_info=PexInfo(content=self._pex_info.dump()))
+
def path(self):
return self.chroot().path()
@@ -91,11 +98,6 @@ class PEXBuilder(object):
def add_source(self, filename, env_filename):
self._chroot.link(filename, env_filename, "source")
- if filename.endswith('.py'):
- env_filename_pyc = os.path.splitext(env_filename)[0] + '.pyc'
- with open(filename) as fp:
- pyc_object = CodeMarshaller.from_py(fp.read(), env_filename)
- self._chroot.write(pyc_object.to_pyc(), env_filename_pyc, 'source')
def add_resource(self, filename, env_filename):
self._chroot.link(filename, env_filename, "resource")
@@ -103,12 +105,8 @@ class PEXBuilder(object):
def add_requirement(self, req, dynamic=False, repo=None):
self._pex_info.add_requirement(req, repo=repo, dynamic=dynamic)
- def add_dependency_file(self, filename, env_filename):
- # TODO(wickman) This is broken. The build cache abstraction just breaks down here.
- if filename.endswith('.egg'):
- self.add_egg(filename)
- else:
- self._chroot.link(filename, os.path.join(PEXBuilder.DEPENDENCY_DIR, env_filename))
+ def set_entry_point(self, entry_point):
+ self.info().entry_point = entry_point
def add_egg(self, egg):
"""
@@ -153,30 +151,25 @@ class PEXBuilder(object):
def _prepare_main(self):
self._chroot.write(BOOTSTRAP_ENVIRONMENT, '__main__.py', label='main')
+ # TODO(wickman) Ideally we include twitter.common.python and twitter.common-core via the eggs
+ # rather than this hackish .bootstrap mechanism. (Furthermore, we'll probably need to include
+ # both a pkg_resources and lib2to3 version of pkg_resources.)
def _prepare_bootstrap(self):
"""
Write enough of distribute into the .pex .bootstrap directory so that
we can be fully self-contained.
"""
bare_env = pkg_resources.Environment()
-
- distribute_req = pkg_resources.Requirement.parse('distribute>=0.6.24')
- distribute_dist = None
-
- for dist in DistributionHelper.all_distributions(sys.path):
- if dist in distribute_req and bare_env.can_add(dist):
- distribute_dist = dist
- break
- else:
- raise DistributionNotFound('Could not find distribute!')
-
- for fn, content in DistributionHelper.walk_data(distribute_dist):
- if fn.startswith('pkg_resources.py') or fn.startswith('setuptools'):
+ distribute = dist_from_egg(self._interpreter.distribute)
+ for fn, content in DistributionHelper.walk_data(distribute):
+ # TODO(wickman) Investigate if the omission of setuptools proper causes failures to
+ # build eggs.
+ if fn.startswith('pkg_resources.py'):
self._chroot.write(content, os.path.join(self.BOOTSTRAP_DIR, fn), 'resource')
libraries = (
- 'twitter.common.dirutil',
'twitter.common.collections',
'twitter.common.contextutil',
+ 'twitter.common.dirutil',
'twitter.common.lang',
'twitter.common.python',
'twitter.common.python.http',
@@ -215,10 +208,10 @@ class PEXBuilder(object):
except OSError:
# The expectation is that the file does not exist, so continue
pass
+ safe_mkdir(os.path.dirname(filename))
with open(filename + '~', 'ab') as pexfile:
assert os.path.getsize(pexfile.name) == 0
- # TODO(wickman) Make this tunable
- pexfile.write(Compatibility.to_bytes('%s\n' % PythonIdentity.get().hashbang()))
+ pexfile.write(Compatibility.to_bytes('%s\n' % self._interpreter.identity.hashbang()))
self._chroot.zip(filename + '~', mode='a')
if os.path.exists(filename):
os.unlink(filename)
diff --git a/src/python/twitter/common/python/pex_info.py b/src/python/twitter/common/python/pex_info.py
index d621b22..7c6df81 100644
--- a/src/python/twitter/common/python/pex_info.py
+++ b/src/python/twitter/common/python/pex_info.py
@@ -49,8 +49,8 @@ class PexInfo(object):
def make_build_properties(cls):
pi = PythonInterpreter()
base_info = {
- 'class': pi.identity().interpreter,
- 'version': pi.identity().version,
+ 'class': pi.identity.interpreter,
+ 'version': pi.identity.version,
'platform': get_platform(),
}
try:
diff --git a/src/python/twitter/common/python/resolver.py b/src/python/twitter/common/python/resolver.py
index b630b11..214e88e 100644
--- a/src/python/twitter/common/python/resolver.py
+++ b/src/python/twitter/common/python/resolver.py
@@ -6,6 +6,7 @@ import tempfile
from .base import maybe_requirement_list
from .fetcher import PyPIFetcher
from .http import Crawler
+from .interpreter import PythonInterpreter
from .obtainer import Obtainer
from .platforms import Platform
from .tracer import TRACER
@@ -23,30 +24,27 @@ class ResolverEnvironment(Environment):
return Platform.distribution_compatible(dist, python=self.python, platform=self.platform)
-class Resolver(WorkingSet):
- def __init__(self, cache=None, crawler=None, fetchers=None, install_cache=None,
- conn_timeout=None):
- self._crawler = crawler or Crawler()
- self._fetchers = fetchers or [PyPIFetcher()]
- self._install_cache = install_cache
+class ResolverBase(WorkingSet):
+ """
+ Base class for requirement resolution. Subclass make_installer in order to build
+ a resolver that can resolve distributions from source or from remote repositories.
+ """
+
+ def __init__(self, cache=None):
self._cached_entries = set(find_distributions(cache)) if cache else set()
self._entries = set()
- self._conn_timeout = conn_timeout
- super(Resolver, self).__init__(entries=[])
+ super(ResolverBase, self).__init__(entries=[])
- def make_installer(self, python, platform):
- obtainer = Obtainer(self._crawler, self._fetchers,
- Translator.default(self._install_cache, python=python, platform=platform,
- conn_timeout=self._conn_timeout))
- return obtainer.obtain
+ def make_installer(self, reqs, interpreter, platform):
+ return None
- def resolve(self, requirements, python=Platform.python(), platform=Platform.current()):
+ def resolve(self, requirements, interpreter=PythonInterpreter.get(), platform=Platform.current()):
requirements = maybe_requirement_list(requirements)
env = ResolverEnvironment([d.location for d in (self._entries | self._cached_entries)],
- python=python, platform=platform)
+ python=interpreter.python, platform=platform)
added = set()
- for dist in super(Resolver, self).resolve(requirements, env=env,
- installer=self.make_installer(python, platform)):
+ for dist in super(ResolverBase, self).resolve(requirements, env=env,
+ installer=self.make_installer(requirements, interpreter, platform)):
if dist not in self._entries:
added.add(dist)
self._entries.add(dist)
@@ -54,3 +52,22 @@ class Resolver(WorkingSet):
def distributions(self):
return self._entries
+
+
+class Resolver(ResolverBase):
+ """
+ Default resolver.
+ """
+ def __init__(self, cache=None, crawler=None, fetchers=None, install_cache=None,
+ conn_timeout=None):
+ self._crawler = crawler or Crawler()
+ self._fetchers = fetchers or [PyPIFetcher()]
+ self._install_cache = install_cache
+ self._conn_timeout = conn_timeout
+ super(Resolver, self).__init__(cache=cache)
+
+ def make_installer(self, reqs, interpreter, platform):
+ obtainer = Obtainer(self._crawler, self._fetchers,
+ Translator.default(self._install_cache, interpreter=interpreter, platform=platform,
+ conn_timeout=self._conn_timeout))
+ return obtainer.obtain
diff --git a/src/python/twitter/common/python/translator.py b/src/python/twitter/common/python/translator.py
index ccd2644..a862df5 100644
--- a/src/python/twitter/common/python/translator.py
+++ b/src/python/twitter/common/python/translator.py
@@ -3,12 +3,13 @@ import os
import sys
from zipimport import zipimporter
-from twitter.common.dirutil import safe_rmtree, safe_mkdtemp
+from twitter.common.dirutil import safe_rmtree, safe_mkdtemp, chmod_plus_w
from twitter.common.lang import AbstractClass, Compatibility
from .distiller import Distiller
from .http import SourceLink, EggLink
from .installer import Installer
+from .interpreter import PythonInterpreter
from .platforms import Platform
from .tracer import TRACER
@@ -62,8 +63,27 @@ def dist_from_egg(egg_path):
class SourceTranslator(TranslatorBase):
- def __init__(self, install_cache=None, conn_timeout=None):
+ @classmethod
+ def run_2to3(cls, path):
+ from lib2to3.refactor import get_fixers_from_package, RefactoringTool
+ rt = RefactoringTool(get_fixers_from_package('lib2to3.fixes'))
+ with TRACER.timed('Translating %s' % path):
+ for root, dirs, files in os.walk(path):
+ for fn in files:
+ full_fn = os.path.join(root, fn)
+ if full_fn.endswith('.py'):
+ with TRACER.timed('%s' % fn, V=3):
+ try:
+ chmod_plus_w(full_fn)
+ rt.refactor_file(full_fn, write=True)
+ except IOError as e:
+ TRACER.log('Failed to translate %s: %s' % (fn, e))
+
+ def __init__(self, install_cache=None, interpreter=PythonInterpreter.get(), use_2to3=False,
+ conn_timeout=None):
self._install_cache = install_cache or safe_mkdtemp()
+ self._interpreter = interpreter
+ self._use_2to3 = use_2to3
self._conn_timeout = conn_timeout
def translate(self, link):
@@ -74,8 +94,12 @@ class SourceTranslator(TranslatorBase):
unpack_path, installer = None, None
try:
unpack_path = link.fetch(conn_timeout=self._conn_timeout)
+ version = self._interpreter.version
+ if self._use_2to3 and version >= (3,):
+ self.run_2to3(unpack_path)
with TRACER.timed('Installing %s' % link.name):
- installer = Installer(unpack_path, strict=(link.name != 'distribute'))
+ installer = Installer(unpack_path, interpreter=self._interpreter,
+ strict=(link.name != 'distribute'))
with TRACER.timed('Distilling %s' % link.name):
try:
dist = installer.distribution()
@@ -113,9 +137,10 @@ class EggTranslator(TranslatorBase):
class Translator(object):
@staticmethod
- def default(install_cache=None, platform=Platform.current(), python=Platform.python(),
+ def default(install_cache=None, platform=Platform.current(), interpreter=PythonInterpreter.get(),
conn_timeout=None):
return ChainedTranslator(
- EggTranslator(install_cache=install_cache, platform=platform, python=python,
+ EggTranslator(install_cache=install_cache, platform=platform, python=interpreter.python,
conn_timeout=conn_timeout),
- SourceTranslator(install_cache=install_cache, conn_timeout=conn_timeout))
+ SourceTranslator(install_cache=install_cache, interpreter=interpreter,
+ conn_timeout=conn_timeout))
diff --git a/src/python/twitter/common/recordio/BUILD b/src/python/twitter/common/recordio/BUILD
index b7e6935..fe1114b 100644
--- a/src/python/twitter/common/recordio/BUILD
+++ b/src/python/twitter/common/recordio/BUILD
@@ -28,6 +28,6 @@ python_library(
sources = ['thrift_recordio.py'],
dependencies = [
pants(':recordio'),
- python_requirement('thrift')
+ python_requirement('thrift', use_2to3=True)
]
)
diff --git a/src/python/twitter/mesos/BUILD b/src/python/twitter/mesos/BUILD
index ec35fa6..ea615a6 100644
--- a/src/python/twitter/mesos/BUILD
+++ b/src/python/twitter/mesos/BUILD
@@ -52,6 +52,7 @@ python_binary(
pants('src/python/twitter/common/app'),
pants('src/python/twitter/common/log')
],
+ compatibility = ['CPython<3'],
platforms = (
'linux-x86_64',
'macosx-10.6-x86_64',
diff --git a/src/python/twitter/mesos/executor/BUILD b/src/python/twitter/mesos/executor/BUILD
index 007b36b..084b687 100644
--- a/src/python/twitter/mesos/executor/BUILD
+++ b/src/python/twitter/mesos/executor/BUILD
@@ -119,7 +119,7 @@ python_library(
pants('src/thrift/com/twitter/thermos:py-thrift'),
pants(':thermos_executor_base'),
pants(':health_interfaces'),
- python_requirement('mesos'),
+ python_requirement('mesos', compatibility=['>=2.6,<2.7']), # only 2.6 versions
]
)
@@ -133,7 +133,7 @@ python_library(
pants('src/python/twitter/common/log'),
pants('src/thrift/com/twitter/mesos/gen:py-thrift'),
pants('src/thrift/com/twitter/thermos:py-thrift'),
- python_requirement('mesos'),
+ python_requirement('mesos', compatibility=['>=2.6,<2.7']), # only 2.6 versions
]
)
diff --git a/src/python/twitter/pants/BUILD b/src/python/twitter/pants/BUILD
index 0c3f774..a61e075 100644
--- a/src/python/twitter/pants/BUILD
+++ b/src/python/twitter/pants/BUILD
@@ -19,9 +19,8 @@ PANTS_GARBAGE = rglobs('*.pyc') + rglobs('*~')
PANTS_RESOURCES = rglobs('*') - PANTS_SOURCES - PANTS_GARBAGE
# Only build pylint on py 2.x
-def pylint_build_filter():
- import sys
- return sys.version_info[0] == 2
+def pylint_build_filter(python, platform):
+ return python.startswith('2')
python_library(
name = 'pants-deps',
@@ -58,6 +57,7 @@ python_library(
name = 'twitter.pants',
version = '0.0.3',
description = 'the pants build tool',
+ zip_safe = True,
namespace_packages = [
'twitter',
'twitter.common',
diff --git a/src/python/twitter/pants/__init__.py b/src/python/twitter/pants/__init__.py
index fd13a14..177bbd6 100644
--- a/src/python/twitter/pants/__init__.py
+++ b/src/python/twitter/pants/__init__.py
@@ -96,7 +96,6 @@ artifact = Artifact
bundle = Bundle
credentials = Credentials
dependencies = jar_library = JarLibrary
-egg = PythonEgg
exclude = Exclude
fancy_pants = Pants
jar = JarDependency
@@ -143,12 +142,12 @@ def has_sources(target, extension=None):
extension.
"""
return isinstance(target, TargetWithSources) and (
- not extension or any(filter(lambda source: source.endswith(extension), target.sources)))
+ not extension or any(source.endswith(extension) for source in target.sources))
def has_resources(target):
"""Returns True if the target has an associated set of Resources."""
- return hasattr(target, 'resources') and target.resources
+ return hasattr(target, 'resources') and target.resources and not isinstance(target, PythonTarget)
def is_exported(target):
@@ -285,7 +284,6 @@ __all__ = (
'credentials',
'dependencies',
'exclude',
- 'egg',
'get_buildroot',
'get_scm',
'get_version',
diff --git a/src/python/twitter/pants/base/build_cache.py b/src/python/twitter/pants/base/build_cache.py
index 8eb1c15..149f275 100644
--- a/src/python/twitter/pants/base/build_cache.py
+++ b/src/python/twitter/pants/base/build_cache.py
@@ -23,18 +23,16 @@ from abc import ABCMeta, abstractmethod
from collections import namedtuple
from functools import partial
-from twitter.common.lang import Compatibility
+from twitter.common.lang import Compatibility, AbstractClass
from twitter.common.dirutil import safe_rmtree
CacheKey = namedtuple('CacheKey', ['sources', 'hash', 'filename'])
-class SourceScope(object):
+class SourceScope(AbstractClass):
"""Selects sources of a given scope from targets."""
- __metaclass__ = ABCMeta
-
@staticmethod
def for_selector(selector):
class Scope(SourceScope):
@@ -194,7 +192,7 @@ class BuildCache(object):
def _read_sha(self, cache_key):
try:
- with open(self._sha_file(cache_key), 'rb') as fd:
+ with open(self._sha_file(cache_key), 'r') as fd:
return fd.read().strip()
except IOError as e:
if e.errno != errno.ENOENT:
diff --git a/src/python/twitter/pants/base/revision.py b/src/python/twitter/pants/base/revision.py
index 3b560c2..545a7cd 100644
--- a/src/python/twitter/pants/base/revision.py
+++ b/src/python/twitter/pants/base/revision.py
@@ -16,7 +16,12 @@
import re
-from itertools import izip_longest
+try:
+ from itertools import izip_longest as zip_longest
+except ImportError:
+ from itertools import zip_longest
+
+from twitter.common.lang import Compatibility
class Revision(object):
@@ -88,12 +93,37 @@ class Revision(object):
"""Returns a list of this revision's components from most major to most minor."""
return list(self._components)
- def __cmp__(self, other):
- for ours, theirs in izip_longest(self._components, other._components, fillvalue=0):
- difference = cmp(ours, theirs)
+ def _compare(self, other):
+ def type_ordering(a):
+ if a is None:
+ return 0
+ elif isinstance(a, int):
+ return 1
+ elif isinstance(a, Compatibility.string):
+ return 2
+ else:
+ raise ValueError('Unexpected Semver ordering type')
+
+ def incoherent_cmp(a, b):
+ a_order, b_order = type_ordering(a), type_ordering(b)
+ if a is None and b is None:
+ return 0
+ if a_order == b_order:
+ return (a > b) - (a < b)
+ else:
+ return (a_order > b_order) - (a_order < b_order)
+
+ for ours, theirs in zip_longest(self._components, other._components, fillvalue=0):
+ difference = incoherent_cmp(ours, theirs)
if difference != 0:
return difference
return 0
+ def __lt__(self, other):
+ return self._compare(other) < 0
+
+ def __eq__(self, other):
+ return self._compare(other) == 0
+
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, ', '.join(map(repr, self._components)))
diff --git a/src/python/twitter/pants/base/target.py b/src/python/twitter/pants/base/target.py
index e172aea..0386613 100644
--- a/src/python/twitter/pants/base/target.py
+++ b/src/python/twitter/pants/base/target.py
@@ -128,15 +128,24 @@ class Target(object):
def resolve(self):
yield self
- def walk(self, work, predicate = None):
- """Performs a walk of this target's dependency graph visiting each node exactly once. If a
- predicate is supplied it will be used to test each target before handing the target to work and
- descending. Work can return targets in which case these will be added to the walk candidate set
- if not already walked."""
+ def closure(self):
+ return self.walk(lambda target: None)
- self._walk(set(), work, predicate)
+ def walk(self, work, predicate=None):
+ """
+ Performs a walk of this target's dependency graph visiting each node
+ exactly once. If a predicate is supplied it will be used to test each
+ target before handing the target to work and descending. Work can
+ return targets in which case these will be added to the walk candidate
+ set if not already walked.
+
+ Returns the set of visited targets.
+ """
+ walked = set()
+ self._walk(walked, work, predicate)
+ return walked
- def _walk(self, walked, work, predicate = None):
+ def _walk(self, walked, work, predicate=None):
for target in self.resolve():
if target not in walked:
walked.add(target)
diff --git a/src/python/twitter/pants/commands/__init__.py b/src/python/twitter/pants/commands/__init__.py
index d9a098c..a0704ea 100644
--- a/src/python/twitter/pants/commands/__init__.py
+++ b/src/python/twitter/pants/commands/__init__.py
@@ -31,7 +31,7 @@ class Command(object):
@staticmethod
def all_commands():
- return Command._commands.keys()
+ return list(Command._commands.keys())
_commands = {}
diff --git a/src/python/twitter/pants/commands/goal.py b/src/python/twitter/pants/commands/goal.py
index 00761b1..7326958 100644
--- a/src/python/twitter/pants/commands/goal.py
+++ b/src/python/twitter/pants/commands/goal.py
@@ -37,7 +37,7 @@ from twitter.common import log
from twitter.common.collections import OrderedSet
from twitter.common.dirutil import safe_mkdir, safe_rmtree
from twitter.common.lang import Compatibility
-from twitter.pants import get_buildroot, goal, group, has_sources, is_apt
+from twitter.pants import get_buildroot, goal, group, has_sources, is_apt, is_concrete
from twitter.pants.base import Address, BuildFile, Config, ParseContext, Target
from twitter.pants.base.rcfile import RcFile
from twitter.pants.commands import Command
@@ -333,6 +333,12 @@ class Goal(Command):
except (IOError, SyntaxError):
error(spec)
+ # TODO(wickman) Unify PythonTarget baseclass with InternalTarget in order to merge reification
+ for target in self.targets:
+ for subtarget in target.walk(lambda target: None):
+ if hasattr(subtarget, 'reify') and callable(subtarget.reify):
+ subtarget.reify()
+
self.phases = [Phase(goal) for goal in goals]
rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
@@ -422,31 +428,15 @@ from twitter.pants.targets import (
ScalaLibrary,
ScalaTests,
ScalacPlugin)
-from twitter.pants.tasks.antlr_gen import AntlrGen
-from twitter.pants.tasks.binary_create import BinaryCreate
-from twitter.pants.tasks.bundle_create import BundleCreate
-from twitter.pants.tasks.checkstyle import Checkstyle
+
+
from twitter.pants.tasks.extract import Extract
from twitter.pants.tasks.filedeps import FileDeps
-from twitter.pants.tasks.idl_resolve import IdlResolve
-from twitter.pants.tasks.ivy_resolve import IvyResolve
from twitter.pants.tasks.jar_create import JarCreate
-from twitter.pants.tasks.java_compile import JavaCompile
-from twitter.pants.tasks.javadoc_gen import JavadocGen
-from twitter.pants.tasks.scaladoc_gen import ScaladocGen
-from twitter.pants.tasks.junit_run import JUnitRun
-from twitter.pants.tasks.jvm_run import JvmRun
from twitter.pants.tasks.markdown_to_html import MarkdownToHtml
from twitter.pants.tasks.nailgun_task import NailgunTask
from twitter.pants.tasks.listtargets import ListTargets
from twitter.pants.tasks.pathdeps import PathDeps
-from twitter.pants.tasks.prepare_resources import PrepareResources
-from twitter.pants.tasks.protobuf_gen import ProtobufGen
-from twitter.pants.tasks.scala_compile import ScalaCompile
-from twitter.pants.tasks.scala_repl import ScalaRepl
-from twitter.pants.tasks.specs_run import SpecsRun
-from twitter.pants.tasks.thrift_gen import ThriftGen
-from twitter.pants.tasks.scrooge_gen import ScroogeGen
class Invalidator(Task):
@@ -481,39 +471,78 @@ if NailgunTask.killall:
ng_killall.install('clean-all', first=True)
-# TODO(John Sirois): Resolve eggs
+from twitter.pants.tasks.python.establish_roots import EstablishRoots
+from twitter.pants.tasks.python.setup import SetupPythonEnvironment
+
+goal(
+ name='py-root',
+ action=EstablishRoots,
+ dependencies=['gen'],
+).install('root').with_description('Set up roots of python environments.')
+goal(
+ name='py-setup',
+ action=SetupPythonEnvironment,
+ dependencies=['root'],
+).install('setup').with_description('Initialize the python environment roots.')
+
+
+from twitter.pants.tasks.ivy_resolve import IvyResolve
+from twitter.pants.tasks.idl_resolve import IdlResolve
+from twitter.pants.tasks.python.resolve import PythonResolve
goal(
name='ivy',
action=IvyResolve,
dependencies=['gen']
-).install('resolve').with_description('Resolves jar dependencies and produces dependency reports.')
-
+).install('resolve').with_description('Resolves dependencies and produces dependency reports.')
+goal(
+ name='eggs',
+ action=PythonResolve,
+ dependencies=['setup']
+).install('resolve')
goal(
name='idl',
action=IdlResolve,
).install('resolve-idl').with_description('Resolves idl jar dependencies.')
-
goal(
name='extract',
action=Extract,
).install('resolve-idl')
+
+from twitter.pants.tasks.python.chroot import PythonChroot
+goal(
+ name='py-chroot',
+ action=PythonChroot,
+ dependencies=['resolve']
+).install('chroot').with_description('Populates python chrooted environments.')
+
+
# TODO(John Sirois): gen attempted as the sole Goal should gen for all known gen types but
# recognize flags to narrow the gen set
-goal(name='thrift', action=ThriftGen,
- dependencies=['resolve-idl']).install('gen').with_description('Generate code.')
-goal(name='scrooge', action=ScroogeGen,
- dependencies=['resolve-idl']).install('gen')
-goal(name='protoc', action=ProtobufGen,
- dependencies=['resolve-idl']).install('gen')
-goal(name='antlr', action=AntlrGen,
- dependencies=['resolve-idl']).install('gen')
+from twitter.pants.tasks.antlr_gen import AntlrGen
+from twitter.pants.tasks.protobuf_gen import ProtobufGen
+from twitter.pants.tasks.thrift_gen import ThriftGen
+from twitter.pants.tasks.scrooge_gen import ScroogeGen
+
+goal(name='scrooge', action=ScroogeGen, dependencies=['resolve-idl']).install('gen')
+goal(name='thrift', action=ThriftGen, dependencies=['resolve-idl']).install('gen')
+goal(name='protoc', action=ProtobufGen, dependencies=['resolve-idl']).install('gen')
+goal(name='antlr', action=AntlrGen, dependencies=['resolve-idl']).install('gen')
+
+from twitter.pants.tasks.checkstyle import Checkstyle
+from twitter.pants.tasks.python.lint import PythonLintTask
goal(
name='checkstyle',
action=Checkstyle,
dependencies=['gen', 'resolve']
-).install().with_description('Run checkstyle against java source code.')
+).install().with_description('Run checkstyle against source code.')
+goal(
+ name='pylint',
+ action=PythonLintTask,
+ dependencies=['chroot']
+).install('checkstyle')
+
# TODO(John Sirois): These group predicates could simplify to simple has_sources checks except for
# the fact that sometimes 'aggregator' targets with no sources serve as a dependency link in the
@@ -529,12 +558,14 @@ def is_scala(target):
or (isinstance(target, (JvmBinary, junit_tests)) and has_sources(target, '.scala')))
+from twitter.pants.tasks.java_compile import JavaCompile
+from twitter.pants.tasks.python.compile import PythonCompile
+from twitter.pants.tasks.scala_compile import ScalaCompile
goal(name='scalac',
action=ScalaCompile,
group=group('jvm', is_scala),
dependencies=['gen', 'resolve']).install('compile').with_description(
- 'Compile both generated and checked in code.'
- )
+ 'Compile both generated and checked in code.')
goal(name='apt',
action=JavaCompile,
group=group('jvm', is_apt),
@@ -543,12 +574,18 @@ goal(name='javac',
action=JavaCompile,
group=group('jvm', is_java),
dependencies=['gen', 'resolve']).install('compile')
+goal(name='pyc',
+ action=PythonCompile,
+ dependencies=['resolve']).install('compile')
+from twitter.pants.tasks.prepare_resources import PrepareResources
goal(name='prepare', action=PrepareResources).install('resources')
# TODO(John Sirois): pydoc also
+from twitter.pants.tasks.javadoc_gen import JavadocGen
+from twitter.pants.tasks.scaladoc_gen import ScaladocGen
goal(name='javadoc',
action=JavadocGen,
dependencies=['compile']).install('doc').with_description('Create documentation.')
@@ -565,40 +602,93 @@ if MarkdownToHtml.AVAILABLE:
goal(name='jar',
action=JarCreate,
- dependencies=['compile', 'resources']).install('jar').with_description('Create one or more jars.')
+ dependencies=['compile', 'resources']
+).install('jar').with_description('Create one or more jars.')
+
+
+# TODO(John Sirois): Publish eggs in the publish phase
+from twitter.pants.tasks.jar_publish import JarPublish
+goal(name='publish',
+ action=JarPublish,
+ dependencies=[
+ 'javadoc',
+ 'jar'
+ ]).install().with_description('Publish one or more artifacts.')
+
+
+from twitter.pants.tasks.junit_run import JUnitRun
+from twitter.pants.tasks.specs_run import SpecsRun
+from twitter.pants.tasks.python.pytest_run import PytestRun
goal(name='junit',
action=JUnitRun,
dependencies=['compile', 'resources']).install('test').with_description('Test compiled code.')
goal(name='specs',
action=SpecsRun,
dependencies=['compile', 'resources']).install('test')
+goal(name='pytest',
+ action=PytestRun,
+ dependencies=['chroot']).install('test')
-# TODO(John Sirois): Create pex's in binary phase
+
+from twitter.pants.tasks.binary_create import BinaryCreate
+from twitter.pants.tasks.python.pex import CreatePex
goal(
name='binary',
action=BinaryCreate,
dependencies=['jar']
-).install().with_description('Create a jvm binary jar.')
+).install('binary').with_description('Create a binary jar or pex.')
+goal(
+ name='pex',
+ action=CreatePex,
+ dependencies=['chroot']
+).install('binary')
+
+
+from twitter.pants.tasks.bundle_create import BundleCreate
goal(
name='bundle',
action=BundleCreate,
dependencies=['binary']
).install().with_description('Create an application bundle from binary targets.')
+
+from twitter.pants.tasks.jvm_run import JvmRun
+from twitter.pants.tasks.python.run import PythonRun
goal(
name='jvm-run',
action=JvmRun,
dependencies=['compile', 'resources']
).install('run').with_description('Run a (currently JVM only) binary target.')
+goal(
+ name='python-run',
+ action=PythonRun,
+ dependencies=['chroot']
+).install('run').with_description('Run a python binary target.')
+
+from twitter.pants.tasks.python.repl import PythonRepl
+from twitter.pants.tasks.scala_repl import ScalaRepl
goal(
name='scala-repl',
action=ScalaRepl,
dependencies=['compile', 'resources']
).install('repl').with_description(
'Run a (currently Scala only) REPL with the classpath set according to the targets.')
+goal(
+ name='python-repl',
+ action=PythonRepl,
+ dependencies=['chroot']
+).install('repl')
+
+
+from twitter.pants.tasks.python.setup_py import PythonSetupPy
+goal(
+ name='setup_py',
+ action=PythonSetupPy,
+).install().with_description('Generate external python artifacts')
+
goal(
name='filedeps',
@@ -616,8 +706,8 @@ goal(
action=ListTargets
).install('list').with_description('List available BUILD targets.')
-from twitter.pants.tasks.idea_gen import IdeaGen
+from twitter.pants.tasks.idea_gen import IdeaGen
goal(
name='idea',
action=IdeaGen,
@@ -626,7 +716,6 @@ goal(
from twitter.pants.tasks.eclipse_gen import EclipseGen
-
goal(
name='eclipse',
action=EclipseGen,
@@ -634,17 +723,9 @@ goal(
).install().with_description('Create an Eclipse project from the given targets.')
-from twitter.pants.tasks.python.setup import SetupPythonEnvironment
-
-goal(
- name='python-setup',
- action=SetupPythonEnvironment,
-).install('setup').with_description(
-"Setup the target's build environment.")
from twitter.pants.tasks.dependees import ReverseDepmap
-
goal(
name='dependees',
action=ReverseDepmap
@@ -652,7 +733,6 @@ goal(
from twitter.pants.tasks.depmap import Depmap
-
goal(
name='depmap',
action=Depmap
@@ -669,7 +749,6 @@ goal(
from twitter.pants.tasks.filemap import Filemap
-
goal(
name='filemap',
action=Filemap
@@ -678,7 +757,6 @@ goal(
from twitter.pants.tasks.minimal_cover import MinimalCover
-
goal(
name='minimize',
action=MinimalCover
@@ -686,7 +764,6 @@ goal(
from twitter.pants.tasks.filter import Filter
-
goal(
name='filter',
action=Filter
diff --git a/src/python/twitter/pants/goal/__init__.py b/src/python/twitter/pants/goal/__init__.py
index c43087e..d5b0d59 100644
--- a/src/python/twitter/pants/goal/__init__.py
+++ b/src/python/twitter/pants/goal/__init__.py
@@ -104,7 +104,9 @@ class Goal(object):
def prepare(self, context):
"""Prepares a Task that can be executed to achieve this goal."""
- return self._task(context)
+ task = self._task(context)
+ task.prepare()
+ return task
def install(self, phase=None, first=False, replace=False, before=None, after=None):
"""
diff --git a/src/python/twitter/pants/goal/context.py b/src/python/twitter/pants/goal/context.py
index 5093413..5f56238 100644
--- a/src/python/twitter/pants/goal/context.py
+++ b/src/python/twitter/pants/goal/context.py
@@ -13,6 +13,62 @@ from twitter.pants.base.target import Target
from twitter.pants.targets import Pants
from twitter.pants.goal.products import Products
+# TODO(wickman) Suss out this API and possibly merge it with the Products
+# model, e.g. disk-based products vs memory-based 'state' and clarify the
+# namespaces.
+class State(object):
+ def __init__(self):
+ self._state = {}
+
+ @contextmanager
+ def __call__(self, *keys):
+ yield self.get(*keys)
+
+ @contextmanager
+ def list(self, *keys):
+ yield self._mutable_lookup(keys, list)
+
+ @contextmanager
+ def map(self, *keys):
+ yield self._mutable_lookup(keys, dict)
+
+ @contextmanager
+ def os(self, *keys):
+ yield self._mutable_lookup(keys, OrderedSet)
+
+ def set(self, *args):
+ keys, value = args[:-1], args[-1]
+ leaf, leaf_key = self._yield_leaf(keys)
+ leaf[leaf_key] = value
+
+ def get(self, *keys):
+ leaf, leaf_key = self._yield_leaf(keys)
+ return leaf.get(leaf_key)
+
+ def has(self, *keys):
+ leaf = self._state
+ for key in keys:
+ if key not in leaf:
+ return False
+ leaf = leaf[key]
+ return True
+
+ def _yield_leaf(self, keys):
+ leaf = self._state
+ keys, leaf_key = keys[:-1], keys[-1]
+ for key in keys:
+ if key not in leaf:
+ leaf[key] = {}
+ leaf = leaf[key]
+ return leaf, leaf_key
+
+ def _mutable_lookup(self, keys, leaf_factory):
+ leaf, leaf_key = self._yield_leaf(keys)
+ if leaf_key not in leaf:
+ leaf[leaf_key] = leaf_factory()
+ return leaf[leaf_key]
+
+
class Context(object):
"""Contains the context for a single run of pants.
@@ -28,16 +84,14 @@ class Context(object):
def info(self, msg): pass
def warn(self, msg): pass
-
def __init__(self, config, options, target_roots, lock=None, log=None, target_base=None):
self._config = config
self._options = options
self._lock = lock or Lock.unlocked()
self._log = log or Context.Log()
self._target_base = target_base or Target
- self._state = {}
+ self._state = State()
self._products = Products()
-
self.replace_targets(target_roots)
@property
@@ -66,6 +120,10 @@ class Context(object):
return self._products
@property
+ def state(self):
+ return self._state
+
+ @property
def target_roots(self):
"""Returns the targets specified on the command line.
@@ -78,7 +136,7 @@ class Context(object):
def identify(self, targets):
id = hashlib.md5()
for target in targets:
- id.update(target.id)
+ id.update(target.id.encode('utf8'))
return id.hexdigest()
def __str__(self):
@@ -88,12 +146,13 @@ class Context(object):
"""Replaces all targets in the context with the given roots and their transitive
dependencies.
"""
+ self.log.debug('Setting target roots to %r' % (target_roots,))
self._target_roots = list(target_roots)
-
self._targets = OrderedSet()
for target in self._target_roots:
self.add_target(target)
self.id = self.identify(self._targets)
+ self.log.debug('Setting root context id to %r' % self.id)
def add_target(self, target):
"""Adds a target and its transitive dependencies to the run context.
@@ -132,16 +191,19 @@ class Context(object):
If specified, the predicate will be used to narrow the scope of targets returned.
"""
- return filter(predicate, self._targets)
+ return list(filter(predicate, self._targets))
def dependants(self, on_predicate=None, from_predicate=None):
- """Returns a map from targets that satisfy the from_predicate to targets they depend on that
+ """Returns a map from targets that satisfy the from_predicate to targets they depend on that
satisfy the on_predicate.
"""
core = set(self.targets(on_predicate))
+ #self.log.debug('on_predicate targets: %s' % core)
dependees = defaultdict(set)
+ #self.log.debug('from_predicate targets: %s' % self.targets(from_predicate))
for target in self.targets(from_predicate):
if hasattr(target, 'dependencies'):
+ #self.log.debug('target (%s) has dependencies: %s' % (target, target.dependencies))
for dependency in target.dependencies:
if dependency in core:
dependees[target].add(dependency)
@@ -151,9 +213,3 @@ class Context(object):
"""Returns an iterator over the target(s) the given address points to."""
with ParseContext.temp():
return Pants(spec).resolve()
-
- @contextmanager
- def state(self, key, default=None):
- value = self._state.get(key, default)
- yield value
- self._state[key] = value
diff --git a/src/python/twitter/pants/goal/group.py b/src/python/twitter/pants/goal/group.py
index 8f342de..82ae13e 100644
--- a/src/python/twitter/pants/goal/group.py
+++ b/src/python/twitter/pants/goal/group.py
@@ -78,7 +78,7 @@ class Group(object):
else:
for chunk in Group.create_chunks(context, goals):
for goal in goals:
- goal_chunk = filter(goal.group.predicate, chunk)
+ goal_chunk = list(filter(goal.group.predicate, chunk))
if len(goal_chunk) > 0:
context.log.info('[%s:%s:%s]' % (phase, group_name, goal.name))
execution_phases[phase].add((group_name, goal.name))
diff --git a/src/python/twitter/pants/goal/phase.py b/src/python/twitter/pants/goal/phase.py
index 5d410a9..fca8fa0 100644
--- a/src/python/twitter/pants/goal/phase.py
+++ b/src/python/twitter/pants/goal/phase.py
@@ -208,7 +208,8 @@ class Phase(PhaseBase):
for phase in phases:
Group.execute(phase, tasks_by_goal, context, executed, timer)
- print_timings()
+ if timer:
+ print_timings()
return 0
except (TaskError, GoalError) as e:
message = '%s' % e
@@ -216,7 +217,8 @@ class Phase(PhaseBase):
print('\nFAILURE: %s\n' % e)
else:
print('\nFAILURE\n')
- print_timings()
+ if timer:
+ print_timings()
return 1
@staticmethod
@@ -261,7 +263,7 @@ class Phase(PhaseBase):
g = self.goals()
if replace:
del g[:]
- g_names = map(lambda goal: goal.name, g)
+ g_names = [gl.name for gl in g]
if first:
g.insert(0, goal)
elif before in g_names:
diff --git a/src/python/twitter/pants/goal/products.py b/src/python/twitter/pants/goal/products.py
index 56c07be..5878def 100644
--- a/src/python/twitter/pants/goal/products.py
+++ b/src/python/twitter/pants/goal/products.py
@@ -1,53 +1,56 @@
from collections import defaultdict
+from functools import reduce
-class Products(object):
- class ProductMapping(object):
- """
- Maps products of a given type by target. Its assumed that all products of a given type for
- a given target are emitted to a single base directory.
- """
- def __init__(self, typename):
- self.typename = typename
- self.by_target = defaultdict(lambda: defaultdict(list))
+class ProductMapping(object):
+ """
+ Maps products of a given type by target. Its assumed that all products of a given type for
+ a given target are emitted to a single base directory.
+ """
+
+ def __init__(self, typename):
+ self.typename = typename
+ self.by_target = defaultdict(lambda: defaultdict(list))
+
+ def add(self, target, basedir, product_paths=None):
+ """
+ Adds a mapping of products for the given target, basedir pair.
- def add(self, target, basedir, product_paths=None):
- """
- Adds a mapping of products for the given target, basedir pair.
+ If product_paths are specified, these will be added to the existing mapping for this target.
- If product_paths are specified, these will over-write any existing mapping for this target.
+ If product_paths is omitted, the current mutable list of mapped products for this target
+ and basedir is returned for appending.
+ """
+ if product_paths is not None:
+ self.by_target[target][basedir].extend(product_paths)
+ else:
+ return self.by_target[target][basedir]
- If product_paths is omitted, the current mutable list of mapped products for this target
- and basedir is returned for appending.
- """
- if product_paths is not None:
- self.by_target[target][basedir].extend(product_paths)
- else:
- return self.by_target[target][basedir]
+ def get(self, target):
+ """
+ Returns the product mapping for the given target as a tuple of (basedir, products list).
+ Can return None if there is no mapping for the given target.
+ """
+ return self.by_target.get(target)
- def get(self, target):
- """
- Returns the product mapping for the given target as a tuple of (basedir, products list).
- Can return None if there is no mapping for the given target.
- """
- return self.by_target.get(target)
+ def keys_for(self, basedir, filename):
+ """Returns the set of keys the given mapped product is registered under."""
+ keys = set()
+ for key, mappings in self.by_target.items():
+ for mapped in mappings.get(basedir, []):
+ if filename == mapped:
+ keys.add(key)
+ break
+ return keys
- def keys_for(self, basedir, file):
- """Returns the set of keys the given mapped product is registered under."""
- keys = set()
- for key, mappings in self.by_target.items():
- for mapped in mappings.get(basedir, []):
- if file == mapped:
- keys.add(key)
- break
- return keys
+ def __repr__(self):
+ return 'ProductMapping(%s) {\n %s\n}' % (self.typename, '\n '.join(
+ '%s => %s\n %s' % (str(target), basedir, outputs)
+ for target, outputs_by_basedir in self.by_target.items()
+ for basedir, outputs in outputs_by_basedir.items()))
- def __repr__(self):
- return 'ProductMapping(%s) {\n %s\n}' % (self.typename, '\n '.join(
- '%s => %s\n %s' % (str(target), basedir, outputs)
- for target, outputs_by_basedir in self.by_target.items()
- for basedir, outputs in outputs_by_basedir.items()))
+class Products(object):
def __init__(self):
self.products = {}
self.predicates_for_type = defaultdict(list)
@@ -59,7 +62,7 @@ class Products(object):
"""
if predicate:
self.predicates_for_type[typename].append(predicate)
- return self.products.setdefault(typename, Products.ProductMapping(typename))
+ return self.products.setdefault(typename, ProductMapping(typename))
def isrequired(self, typename):
"""
diff --git a/src/python/twitter/pants/python/antlr_builder.py b/src/python/twitter/pants/python/antlr_builder.py
index 1493833..edb4b38 100644
--- a/src/python/twitter/pants/python/antlr_builder.py
+++ b/src/python/twitter/pants/python/antlr_builder.py
@@ -20,39 +20,15 @@ __author__ = 'Benjy Weinberger'
import os
import sys
-import shutil
-import tempfile
import subprocess
-from twitter.common.dirutil.chroot import RelativeChroot
-from twitter.pants.python.egg_builder import EggBuilder
+from twitter.pants.python.code_generator import CodeGenerator
-class PythonAntlrBuilder(object):
+
+class PythonAntlrBuilder(CodeGenerator):
"""
Antlr builder.
"""
- class CodeGenerationException(Exception): pass
-
- def __init__(self, target, root_dir):
- self.target = target
- self.root = root_dir
- self.module = target.module
- distdir = os.path.join(self.root, 'dist')
- self.chroot = RelativeChroot(root_dir, distdir, target.name)
- codegen_root = tempfile.mkdtemp(dir=self.chroot.path(), prefix='codegen.')
- self.codegen_root = os.path.relpath(codegen_root, self.chroot.path())
- self.created_packages = set()
- self.created_namespace_packages = set()
-
- def __del__(self):
- self.cleanup()
-
- def packages(self):
- return self.created_packages
-
- def cleanup(self):
- shutil.rmtree(self.chroot.path())
-
def run_antlrs(self, output_dir):
args = [
'java',
@@ -92,16 +68,12 @@ class PythonAntlrBuilder(object):
print(comm[1], file=sys.stderr)
return rv == 0
- @staticmethod
- def path_to_module(path):
- return path.replace(os.path.sep, '.')
-
- def build_egg(self):
+ def generate(self):
gen_root = os.path.join(self.chroot.path(), self.codegen_root)
# Create the package structure.
path = gen_root
package = ''
- for module_name in self.module.split('.'):
+ for module_name in self.target.module.split('.'):
path = os.path.join(path, module_name)
if package == '':
package = module_name
@@ -109,34 +81,10 @@ class PythonAntlrBuilder(object):
package = package + '.' + module_name
os.mkdir(path)
with open(os.path.join(path, '__init__.py'), 'w') as f:
- if package != self.module: # Only write this in the non-leaf modules.
+ if package != self.target.module: # Only write this in the non-leaf modules.
f.write("__import__('pkg_resources').declare_namespace(__name__)")
self.created_namespace_packages.add(package)
self.created_packages.add(package)
# autogenerate the python files that we bundle up
self.run_antlrs(path)
-
- def dump_setup_py(packages, namespace_packages):
- boilerplate = """
-from setuptools import setup
-
-setup(name = "%(target_name)s",
- version = "dev",
- description = "autogenerated ANTLR parsers for %(target_name)s",
- package_dir = { "": "." },
- packages = %(packages)s,
- namespace_packages = %(namespace_packages)s)
-"""
- boilerplate = boilerplate % {
- 'target_name': self.target.name,
- 'packages': repr(packages),
- 'namespace_packages': repr(list(namespace_packages))
- }
-
- self.chroot.write(boilerplate, os.path.join(self.codegen_root, 'setup.py'))
- dump_setup_py(self.created_packages, self.created_namespace_packages)
-
- egg_root = os.path.join(self.chroot.path(), self.codegen_root)
- egg_path = EggBuilder().build_egg(egg_root, self.target)
- return egg_path
diff --git a/src/python/twitter/pants/python/code_generator.py b/src/python/twitter/pants/python/code_generator.py
new file mode 100644
index 0000000..2e7acab
--- /dev/null
+++ b/src/python/twitter/pants/python/code_generator.py
@@ -0,0 +1,94 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from __future__ import print_function
+
+import os
+import shutil
+import tempfile
+import textwrap
+
+from twitter.common.dirutil.chroot import RelativeChroot
+from twitter.pants.python.sdist_builder import SdistBuilder
+
+
+class CodeGenerator(object):
+ class Error(Exception): pass
+ class CodeGenerationException(Error): pass
+
+ def __init__(self, target, root_dir, target_suffix=None):
+ self.target = target
+ self.suffix = target_suffix or ''
+ self.root = root_dir
+ distdir = os.path.join(self.root, 'dist')
+ self.chroot = RelativeChroot(root_dir, distdir, target.name)
+ codegen_root = tempfile.mkdtemp(dir=self.chroot.path(), prefix='codegen.')
+ self.codegen_root = os.path.relpath(codegen_root, self.chroot.path())
+ self.created_packages = set()
+ self.created_namespace_packages = set()
+
+ def __del__(self):
+ self.cleanup()
+
+ def cleanup(self):
+ shutil.rmtree(self.chroot.path())
+
+ @staticmethod
+ def path_to_module(path):
+ return path.replace(os.path.sep, '.')
+
+ def package_name(self):
+ return '%s%s' % (self.target.id, self.suffix)
+
+ def requirement_string(self):
+ return '%s==0.0.0' % self.package_name()
+
+ def package_dir(self):
+ """Return the code generation root."""
+ return "."
+
+ def generate(self):
+ """Generate code for this target, updating the sets .created_packages and
+ .created_namespace_packages."""
+ raise NotImplementedError
+
+ def dump_setup_py(self):
+ boilerplate = textwrap.dedent("""
+ from setuptools import setup
+
+ setup(name = "%(package_name)s",
+ version = "0.0.0",
+ description = "autogenerated code for %(target_name)s",
+ package_dir = { "": %(package_dir)r },
+ packages = %(packages)s,
+ namespace_packages = %(namespace_packages)s)
+ """)
+ boilerplate = boilerplate % {
+ # NOTE: duplicate 'target_name' key removed; sorted() for deterministic setup.py output.
+ 'package_name': self.package_name(),
+ 'package_dir': self.package_dir(),
+ 'target_name': self.target.name,
+ 'packages': repr(sorted(self.created_packages)),
+ 'namespace_packages': repr(sorted(self.created_namespace_packages))
+ }
+ self.chroot.write(boilerplate.encode('utf8'), os.path.join(self.codegen_root, 'setup.py'))
+
+ def build(self):
+ self.generate()
+ self.dump_setup_py()
+ sdist_root = os.path.join(self.chroot.path(), self.codegen_root)
+ sdist_path = SdistBuilder.build(sdist_root, self.target)
+ return sdist_path
diff --git a/src/python/twitter/pants/python/egg_builder.py b/src/python/twitter/pants/python/egg_builder.py
deleted file mode 100644
index 460f168..0000000
--- a/src/python/twitter/pants/python/egg_builder.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from __future__ import print_function
-
-__author__ = 'Benjy Weinberger'
-
-import glob
-import os
-import subprocess
-import sys
-from twitter.common.contextutil import environment_as, pushd
-
-class EggBuilder(object):
- """A helper class to create an egg."""
-
- class EggBuildingException(Exception): pass
-
- def __init__(self):
- pass
-
- def build_egg(self, egg_root, target):
- """Build an egg containing the files at egg_root for the specified target.
- There must be an egg_root/setup.py file."""
- # TODO(Brian Wickman): Do a sanity check somewhere to ensure that
- # setuptools is on the path?
- args = [
- sys.executable,
- 'setup.py', 'bdist_egg',
- '--dist-dir=dist',
- '--bdist-dir=build.%s' % target.name]
- with pushd(egg_root):
- with environment_as(PYTHONPATH = ':'.join(sys.path)):
- po = subprocess.Popen(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
- comm = po.communicate()
- rv = po.returncode
- eggs = os.path.abspath(os.path.join('dist', '*.egg'))
- eggs = glob.glob(eggs)
- if rv != 0 or len(eggs) != 1:
- print('egg generation failed (return value=%d, num eggs=%d)' % (rv, len(eggs)),
- file=sys.stderr)
- print('STDOUT', file=sys.stderr)
- print(comm[0], file=sys.stderr)
- print('STDERR', file=sys.stderr)
- print(comm[1], file=sys.stderr)
- raise EggBuilder.EggBuildingException(
- 'Generation of eggs failed for target = %s' % target)
- egg_path = eggs[0]
- return egg_path
diff --git a/src/python/twitter/pants/python/python_chroot.py b/src/python/twitter/pants/python/python_chroot.py
index bddb578..9e422bb 100644
--- a/src/python/twitter/pants/python/python_chroot.py
+++ b/src/python/twitter/pants/python/python_chroot.py
@@ -29,7 +29,7 @@ from twitter.common.python.interpreter import PythonIdentity
from twitter.common.python.pex_builder import PEXBuilder
from twitter.common.python.platforms import Platform
from twitter.pants import is_concrete
-from twitter.pants.base import Config
+from twitter.pants.base import Config, ParseContext
from twitter.pants.base.build_cache import BuildCache
from twitter.pants.targets import (
PythonAntlrLibrary,
@@ -39,61 +39,10 @@ from twitter.pants.targets import (
PythonTests,
PythonThriftLibrary)
-
from .antlr_builder import PythonAntlrBuilder
+from .resolver import MultiResolver
from .thrift_builder import PythonThriftBuilder
-import pkg_resources
-
-
-def get_platforms(platform_list):
- def translate(platform):
- return Platform.current() if platform == 'current' else platform
- return tuple(map(translate, platform_list))
-
-
-class MultiResolver(object):
- """
- A multi-platform Requirement resolver for Pants.
- """
- @classmethod
- def from_target(cls, config, target, conn_timeout=None):
- from twitter.common.python.fetcher import PyPIFetcher, Fetcher
- from twitter.common.python.resolver import Resolver
- from twitter.common.python.http import Crawler
- from twitter.common.quantity import Amount, Time
-
- conn_timeout_amount = Amount(conn_timeout, Time.SECONDS) if conn_timeout is not None else None
-
- crawler = Crawler(cache=config.get('python-setup', 'download_cache'),
- conn_timeout=conn_timeout_amount)
-
- fetchers = []
- fetchers.extend(Fetcher([url]) for url in config.getlist('python-repos', 'repos', []))
- fetchers.extend(PyPIFetcher(url) for url in config.getlist('python-repos', 'indices', []))
-
- platforms = config.getlist('python-setup', 'platforms', ['current'])
- if isinstance(target, PythonBinary) and target.platforms:
- platforms = target.platforms
-
- return cls(
- platforms=get_platforms(platforms),
- resolver=Resolver(cache=config.get('python-setup', 'install_cache'),
- crawler=crawler,
- fetchers=fetchers,
- install_cache=config.get('python-setup', 'install_cache'),
- conn_timeout=conn_timeout_amount))
-
- def __init__(self, platforms, resolver):
- self._resolver = resolver
- self._platforms = platforms
-
- def resolve(self, requirements):
- requirements = list(requirements)
- for platform in self._platforms:
- self._resolver.resolve(requirements, platform=platform)
- return self._resolver.distributions()
-
class PythonChroot(object):
_VALID_DEPENDENCIES = {
@@ -116,7 +65,7 @@ class PythonChroot(object):
self._cache = BuildCache(os.path.join(self._config.get('python-setup', 'artifact_cache'),
'%s' % PythonIdentity.get()))
self._extra_targets = list(extra_targets) if extra_targets is not None else []
- self._resolver = MultiResolver.from_target(self._config, target, conn_timeout=conn_timeout)
+ self._resolver = MultiResolver(self._config, target, conn_timeout=conn_timeout)
self._builder = builder or PEXBuilder(tempfile.mkdtemp())
def __del__(self):
@@ -135,25 +84,19 @@ class PythonChroot(object):
return self._builder.path()
def _dump_library(self, library):
- def translate_module(module):
- if module is None:
- module = ''
- return module.replace('.', os.path.sep)
-
- def copy_to_chroot(base, path, relative_to, add_function):
+ def copy_to_chroot(base, path, add_function):
src = os.path.join(self._root, base, path)
- dst = os.path.join(translate_module(relative_to), path)
- add_function(src, dst)
+ add_function(src, path)
- self.debug(' Dumping library: %s [relative module: %s]' % (library, library.module))
+ self.debug(' Dumping library: %s' % library)
for filename in library.sources:
- copy_to_chroot(library.target_base, filename, library.module, self._builder.add_source)
+ copy_to_chroot(library.target_base, filename, self._builder.add_source)
for filename in library.resources:
- copy_to_chroot(library.target_base, filename, library.module, self._builder.add_resource)
+ copy_to_chroot(library.target_base, filename, self._builder.add_resource)
def _dump_requirement(self, req, dynamic, repo):
self.debug(' Dumping requirement: %s%s%s' % (str(req),
- ' (dynamic)' if dynamic else '', ' (repo: %s)' if repo else ''))
+ ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else ''))
self._builder.add_requirement(req, dynamic, repo)
def _dump_distribution(self, dist):
@@ -165,33 +108,21 @@ class PythonChroot(object):
self.debug(' Dumping binary: %s' % binary_name)
self._builder.set_executable(src, os.path.basename(src))
- def _dump_thrift_library(self, library):
- self._dump_built_library(library, PythonThriftBuilder(library, self._root, self._config))
-
- def _dump_antlr_library(self, library):
- self._dump_built_library(library, PythonAntlrBuilder(library, self._root))
-
- def _dump_built_library(self, library, builder):
- # TODO(wickman) Have antlr/thrift generate sdists then leverage the rest of the
- # Fetcher pipeline.
- absolute_sources = library.expand_files()
- absolute_sources.sort()
- cache_key = self._cache.key_for(library.id, absolute_sources)
- if not self._cache.needs_update(cache_key):
- self.debug(' Generating (cached) %s...' % library)
- self._cache.use_cached_files(cache_key, self._builder.add_dependency_file)
- else:
- self.debug(' Generating %s...' % library)
- egg_file = builder.build_egg()
- if not egg_file:
- raise PythonChroot.BuildFailureException("Failed to build %s!" % library)
- src_egg_file = egg_file
- dst_egg_file = os.path.join(os.path.dirname(egg_file),
- cache_key.hash + '_' + os.path.basename(egg_file))
- self.debug(' %s => %s' % (src_egg_file, dst_egg_file))
- os.rename(src_egg_file, dst_egg_file)
- self._cache.update(cache_key, [dst_egg_file])
- self._builder.add_egg(dst_egg_file)
+ def _generate_requirement(self, library, builder_cls):
+ library_key = self._cache.key_for_target(library)
+ builder = builder_cls(library, self._root, '-' + library_key.hash[:8])
+ if self._cache.needs_update(library_key):
+ sdist = builder.build()
+ self._cache.update(library_key, build_artifacts=[sdist], artifact_root=os.path.dirname(sdist))
+ with ParseContext.temp():
+ return PythonRequirement(builder.requirement_string(), repository=library_key.filename,
+ use_2to3=True)
+
+ def _generate_thrift_requirement(self, library):
+ return self._generate_requirement(library, PythonThriftBuilder)
+
+ def _generate_antlr_requirement(self, library):
+ return self._generate_requirement(library, PythonAntlrBuilder)
def resolve(self, targets):
children = defaultdict(OrderedSet)
@@ -214,30 +145,31 @@ class PythonChroot(object):
for lib in targets['libraries']:
self._dump_library(lib)
- for req in targets['reqs']:
- if not req.should_build():
- self.debug('Skipping %s based upon version filter' % req)
- continue
- self._dump_requirement(req._requirement, req._dynamic, req._repository)
-
- for dist in self._resolver.resolve(
- req._requirement for req in targets['reqs'] if req.should_build()):
- self._dump_distribution(dist)
-
+ generated_reqs = OrderedSet()
if targets['thrifts']:
thrift_versions = set()
for thr in targets['thrifts']:
- self._dump_thrift_library(thr)
+ generated_reqs.add(self._generate_thrift_requirement(thr))
thrift_versions.add(thr.thrift_version)
if len(thrift_versions) > 1:
print('WARNING: Target has multiple thrift versions!')
for version in thrift_versions:
- self._builder.add_requirement('thrift==%s' % version)
- for dist in self._resolver.resolve('thrift==%s' % version for version in thrift_versions):
- self._dump_distribution(dist)
+ with ParseContext.temp():
+ generated_reqs.add(PythonRequirement('thrift==%s' % version, use_2to3=True))
for antlr in targets['antlrs']:
- self._dump_antlr_library(antlr)
+ generated_reqs.add(self._generate_antlr_requirement(antlr))
+
+ targets['reqs'] |= generated_reqs
+ for req in targets['reqs']:
+ if not req.should_build(Platform.python(), Platform.current()):
+ self.debug('Skipping %s based upon version filter' % req)
+ continue
+ self._dump_requirement(req._requirement, False, req._repository)
+
+ for dist in self._resolver.resolve(req for req in targets['reqs']
+ if req.should_build(Platform.python(), Platform.current())):
+ self._dump_distribution(dist)
if len(targets['binaries']) > 1:
print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)
diff --git a/src/python/twitter/pants/python/resolver.py b/src/python/twitter/pants/python/resolver.py
new file mode 100644
index 0000000..67a9049
--- /dev/null
+++ b/src/python/twitter/pants/python/resolver.py
@@ -0,0 +1,69 @@
+from twitter.common.collections import OrderedSet
+from twitter.common.python.fetcher import PyPIFetcher, Fetcher
+from twitter.common.python.http import Crawler
+from twitter.common.python.obtainer import Obtainer
+from twitter.common.python.platforms import Platform
+from twitter.common.python.resolver import ResolverBase
+from twitter.common.python.translator import (
+ ChainedTranslator,
+ EggTranslator,
+ SourceTranslator)
+
+from twitter.pants.targets import (
+ PythonBinary,
+ PythonRequirement)
+
+
+def get_platforms(platform_list):
+ def translate(platform):
+ return Platform.current() if platform == 'current' else platform
+ return tuple(map(translate, platform_list))
+
+
+class MultiResolver(ResolverBase):
+ """
+ A multi-platform PythonRequirement resolver for Pants.
+ """
+ @classmethod
+ def fetchers(cls, config):
+ fetchers = []
+ fetchers.extend(Fetcher([url]) for url in config.getlist('python-repos', 'repos', []))
+ fetchers.extend(PyPIFetcher(url) for url in config.getlist('python-repos', 'indices', []))
+ return fetchers
+
+ @classmethod
+ def crawler(cls, config, conn_timeout=None):
+ return Crawler(cache=config.get('python-setup', 'download_cache'),
+ conn_timeout=conn_timeout)
+
+ def __init__(self, config, target, conn_timeout=None):
+ platforms = config.getlist('python-setup', 'platforms', ['current'])
+ if isinstance(target, PythonBinary) and target.platforms:
+ platforms = target.platforms
+
+ self._install_cache = config.get('python-setup', 'install_cache')
+ self._crawler = self.crawler(config, conn_timeout=conn_timeout)
+ self._fetchers = self.fetchers(config)
+ self._platforms = get_platforms(platforms)
+ super(MultiResolver, self).__init__(cache=self._install_cache)
+
+ def make_installer(self, reqs, interpreter, platform):
+ assert len(reqs) == 1 and isinstance(reqs[0], PythonRequirement), 'Got requirement list: %s' % (
+ repr(reqs))
+ req = reqs[0]
+ fetchers = [Fetcher([req.repository])] if req.repository else self._fetchers
+ translator = ChainedTranslator(
+ EggTranslator(install_cache=self._install_cache, platform=platform,
+ python=interpreter.python),
+ SourceTranslator(install_cache=self._install_cache, interpreter=interpreter,
+ use_2to3=req.use_2to3))
+ obtainer = Obtainer(self._crawler, fetchers, translator)
+ return obtainer.obtain
+
+ def resolve(self, requirements):
+ resolved = OrderedSet()
+ requirements = list(requirements)
+ for platform in self._platforms:
+ for req in requirements:
+ resolved.update(super(MultiResolver, self).resolve(req, platform=platform))
+ return list(resolved)
diff --git a/src/python/twitter/pants/python/sdist_builder.py b/src/python/twitter/pants/python/sdist_builder.py
new file mode 100644
index 0000000..a5460b6
--- /dev/null
+++ b/src/python/twitter/pants/python/sdist_builder.py
@@ -0,0 +1,51 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from __future__ import print_function
+
+import glob
+import os
+import subprocess
+import sys
+from twitter.common.contextutil import pushd
+
+
+class SdistBuilder(object):
+ """A helper class to run setup.py projects."""
+
+ class Error(Exception): pass
+ class SetupError(Error): pass
+
+ def __init__(self):
+ pass
+
+ @classmethod
+ def build(cls, setup_root, target):
+ args = [sys.executable, 'setup.py', 'sdist', '--dist-dir=dist', '--formats=gztar']
+ with pushd(setup_root):
+ po = subprocess.Popen(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ comm = po.communicate()
+ rv = po.returncode
+ sdists = glob.glob(os.path.abspath(os.path.join('dist', '*.tar.gz')))
+ if rv != 0 or len(sdists) != 1:
+ print('sdist generation failed', file=sys.stderr)
+ print('STDOUT', file=sys.stderr)
+ print(comm[0], file=sys.stderr)
+ print('STDERR', file=sys.stderr)
+ print(comm[1], file=sys.stderr)
+ raise cls.SetupError('Generation of sdist failed for target %s' % target)
+ return sdists[0]
+
diff --git a/src/python/twitter/pants/python/test_builder.py b/src/python/twitter/pants/python/test_builder.py
index 2409d2d..c904515 100644
--- a/src/python/twitter/pants/python/test_builder.py
+++ b/src/python/twitter/pants/python/test_builder.py
@@ -126,8 +126,8 @@ class PythonTestBuilder(object):
PythonRequirement('pytest'),
PythonRequirement('pytest-cov'),
PythonRequirement('coverage'),
- PythonRequirement('unittest2', version_filter=lambda:sys.version_info[0]==2),
- PythonRequirement('unittest2py3k', version_filter=lambda:sys.version_info[0]==3)
+ PythonRequirement('unittest2', version_filter=lambda py, pl: py.startswith('2')),
+ PythonRequirement('unittest2py3k', version_filter=lambda py, pl: py.startswith('3'))
]
return PythonTestBuilder.TESTING_TARGETS
diff --git a/src/python/twitter/pants/python/thrift_builder.py b/src/python/twitter/pants/python/thrift_builder.py
index 38071e1..49f9580 100644
--- a/src/python/twitter/pants/python/thrift_builder.py
+++ b/src/python/twitter/pants/python/thrift_builder.py
@@ -16,45 +16,20 @@
from __future__ import print_function
-__author__ = 'Brian Wickman'
-
import os
import sys
-import tempfile
import subprocess
from twitter.common.dirutil import safe_rmtree
-from twitter.common.dirutil.chroot import RelativeChroot
-
-from twitter.pants.python.egg_builder import EggBuilder
+from twitter.pants.python.code_generator import CodeGenerator
from twitter.pants.targets.python_thrift_library import PythonThriftLibrary
from twitter.pants.thrift_util import calculate_compile_roots, select_thrift_binary
-class PythonThriftBuilder(object):
- """
- Thrift builder.
- """
- class UnknownPlatformException(Exception):
- def __init__(self, platform):
- Exception.__init__(self, "Unknown platform: %s!" % str(platform))
- class CodeGenerationException(Exception): pass
-
- def __init__(self, target, root_dir, config):
- self.target = target
- self.root = root_dir
- self.config = config
- distdir = os.path.join(self.root, 'dist')
- self.chroot = RelativeChroot(root_dir, distdir, target.name)
- codegen_root = tempfile.mkdtemp(dir=self.chroot.path(), prefix='codegen.')
- self.codegen_root = os.path.relpath(codegen_root, self.chroot.path())
- self.detected_packages = set()
- self.detected_namespace_packages = set()
-
- def __del__(self):
- self.cleanup()
- def packages(self):
- return self.detected_packages
+class PythonThriftBuilder(CodeGenerator):
+ class UnknownPlatformException(CodeGenerator.Error):
+ def __init__(self, platform):
+ super(UnknownPlatformException, self).__init__("Unknown platform: %s!" % str(platform))
def cleanup(self):
safe_rmtree(self.chroot.path())
@@ -102,60 +77,26 @@ class PythonThriftBuilder(object):
print(comm[1], file=sys.stderr)
return rv == 0
- @staticmethod
- def path_to_module(path):
- return path.replace(os.path.sep, '.')
+ def package_dir(self):
+ return "gen-py"
- def build_egg(self):
+ def generate(self):
# autogenerate the python files that we bundle up
self.run_thrifts()
- genpy_root = os.path.join(self.chroot.path(), self.codegen_root, 'gen-py')
+ genpy_root = os.path.join(self.chroot.path(), self.codegen_root, self.package_dir())
for dir, _, files in os.walk(os.path.normpath(genpy_root)):
reldir = os.path.relpath(dir, genpy_root)
if reldir == '.': continue
if '__init__.py' not in files: continue
init_py_abspath = os.path.join(dir, '__init__.py')
module_path = self.path_to_module(reldir)
- self.detected_packages.add(module_path)
- # A namespace package is one that is just a container for other
- # modules and subpackages. Setting their __init__.py files as follows
- # allows them to be distributed across multiple eggs. Without this you
- # couldn't have this egg share any package prefix with any other module
- # in any other egg or in the source tree.
- #
- # Note that the thrift compiler should always generate empty __init__.py
- # files, but we test for this anyway, just in case that changes.
+ self.created_packages.add(module_path)
if len(files) == 1 and os.path.getsize(init_py_abspath) == 0:
with open(init_py_abspath, 'wb') as f:
f.write(b"__import__('pkg_resources').declare_namespace(__name__)")
- self.detected_namespace_packages.add(module_path)
+ self.created_namespace_packages.add(module_path)
- if not self.detected_packages:
- raise PythonThriftBuilder.CodeGenerationException(
+ if not self.created_packages:
+ raise self.CodeGenerationException(
'No Thrift structures declared in %s!' % self.target)
-
- def dump_setup_py(packages, namespace_packages):
- boilerplate = """
-from setuptools import setup
-
-setup(name = "%(target_name)s",
- version = "dev",
- description = "autogenerated thrift bindings for %(target_name)s",
- package_dir = { "": "gen-py" },
- packages = %(packages)s,
- namespace_packages = %(namespace_packages)s)
-"""
- boilerplate = boilerplate % {
- 'target_name': self.target._create_id(),
- 'genpy_root': genpy_root,
- 'packages': repr(list(packages)),
- 'namespace_packages': repr(list(namespace_packages))
- }
-
- self.chroot.write(boilerplate.encode('utf-8'), os.path.join(self.codegen_root, 'setup.py'))
- dump_setup_py(self.detected_packages, self.detected_namespace_packages)
-
- egg_root = os.path.join(self.chroot.path(), self.codegen_root)
- egg_path = EggBuilder().build_egg(egg_root, self.target)
- return egg_path
diff --git a/src/python/twitter/pants/scm/git.py b/src/python/twitter/pants/scm/git.py
index 0fff07d..d246972 100644
--- a/src/python/twitter/pants/scm/git.py
+++ b/src/python/twitter/pants/scm/git.py
@@ -55,7 +55,7 @@ class Git(Scm):
@property
def tag_name(self):
tag = self._check_output(['describe', '--always'], raise_type=Scm.LocalException)
- return None if b'cannot' in tag else self._cleanse(tag)
+ return None if 'cannot' in tag else self._cleanse(tag)
@property
def branch_name(self):
@@ -139,7 +139,7 @@ class Git(Scm):
out, _ = process.communicate()
self._check_result(cmd, process.returncode, failure_msg, raise_type)
- return out
+ return out.decode('utf8')
def _create_git_cmdline(self, args):
return [self._gitcmd, '--git-dir=%s' % self._gitdir, '--work-tree=%s' % self._worktree] + args
@@ -152,4 +152,4 @@ class Git(Scm):
raise raise_type(failure_msg or '%s failed with exit code %d' % (' '.join(cmd), result))
def _cleanse(self, output):
- return output.strip().decode('utf-8')
+ return output.strip()
diff --git a/src/python/twitter/pants/targets/__init__.py b/src/python/twitter/pants/targets/__init__.py
index 8ed77c4..7befadf 100644
--- a/src/python/twitter/pants/targets/__init__.py
+++ b/src/python/twitter/pants/targets/__init__.py
@@ -56,7 +56,6 @@ from .oink_query import OinkQuery
from .pants_target import Pants
from .python_artifact import PythonArtifact
from .python_binary import PythonBinary
-from .python_egg import PythonEgg
from .python_library import PythonLibrary
from .python_antlr_library import PythonAntlrLibrary
from .python_thrift_library import PythonThriftLibrary
@@ -97,7 +96,6 @@ __all__ = [
'Pants',
'PythonArtifact',
'PythonBinary',
- 'PythonEgg',
'PythonLibrary',
'PythonTarget',
'PythonAntlrLibrary',
diff --git a/src/python/twitter/pants/targets/doc.py b/src/python/twitter/pants/targets/doc.py
index 8cc9f15..aaefd66 100644
--- a/src/python/twitter/pants/targets/doc.py
+++ b/src/python/twitter/pants/targets/doc.py
@@ -61,4 +61,4 @@ class Page(InternalTarget, TargetWithSources):
def wikis(self):
"""Returns all the wikis registered with this page."""
- return self._wikis.keys()
+ return list(self._wikis.keys())
diff --git a/src/python/twitter/pants/targets/internal.py b/src/python/twitter/pants/targets/internal.py
index 484f9a1..68f450d 100644
--- a/src/python/twitter/pants/targets/internal.py
+++ b/src/python/twitter/pants/targets/internal.py
@@ -181,7 +181,7 @@ class InternalTarget(Target):
self.jar_dependencies.discard(dependency)
self.update_dependencies([replacement])
- def _walk(self, walked, work, predicate = None):
+ def _walk(self, walked, work, predicate=None):
Target._walk(self, walked, work, predicate)
for dep in self.dependencies:
if isinstance(dep, Target) and not dep in walked:
diff --git a/src/python/twitter/pants/targets/python_artifact.py b/src/python/twitter/pants/targets/python_artifact.py
index 21af9d8..ad1b597 100644
--- a/src/python/twitter/pants/targets/python_artifact.py
+++ b/src/python/twitter/pants/targets/python_artifact.py
@@ -14,7 +14,6 @@
# limitations under the License.
# ==================================================================================================
-from twitter.pants.base.generator import TemplateData
from .python_target import PythonTarget
@@ -55,7 +54,7 @@ class PythonArtifact(object):
@library.setter
def library(self, value):
- assert isinstance(value, PythonTarget)
+ assert isinstance(value, PythonTarget), 'Python artifacts must be bound to Python targets!'
self._library = value
@property
diff --git a/src/python/twitter/pants/targets/python_binary.py b/src/python/twitter/pants/targets/python_binary.py
index 4e33799..39375b9 100644
--- a/src/python/twitter/pants/targets/python_binary.py
+++ b/src/python/twitter/pants/targets/python_binary.py
@@ -34,7 +34,7 @@ class PythonBinary(PythonTarget):
ignore_errors=False,
allow_pypi=False,
platforms=(),
- interpreters=(Platform.python(),),
+ compatibility=None,
provides=None):
"""
name: target name
@@ -59,9 +59,6 @@ class PythonBinary(PythonTarget):
management
platforms: extra platforms to target when building this binary.
-
- interpreters: the interpreter versions to target when building this binary. by default the
- current interpreter version (specify in the form: '2.6', '2.7', '3.2' etc.)
"""
if source is None and dependencies is None:
raise TargetDefinitionException(
@@ -71,13 +68,10 @@ class PythonBinary(PythonTarget):
'Can only declare an entry_point if no source binary is specified.')
if not isinstance(platforms, (list, tuple)) and not isinstance(platforms, Compatibility.string):
raise TargetDefinitionException('platforms must be a list, tuple or string.')
- if not isinstance(interpreters, (list, tuple)):
- raise TargetDefinitionException('interpreters must be a list or tuple.')
self._entry_point = entry_point
self._inherit_path = bool(inherit_path)
self._zip_safe = bool(zip_safe)
- self._interpreters = interpreters
self._repositories = repositories or []
self._indices = indices or []
self._allow_pypi = bool(allow_pypi)
@@ -90,9 +84,14 @@ class PythonBinary(PythonTarget):
self._platforms = tuple(self._platforms)
PythonTarget.__init__(self, name, [] if source is None else [source],
+ compatibility=compatibility,
dependencies=dependencies,
provides=provides)
@property
def platforms(self):
return self._platforms
+
+ @property
+ def entry_point(self):
+ return self._entry_point
\ No newline at end of file
diff --git a/src/python/twitter/pants/targets/python_requirement.py b/src/python/twitter/pants/targets/python_requirement.py
index 4c61838..b97e4b9 100644
--- a/src/python/twitter/pants/targets/python_requirement.py
+++ b/src/python/twitter/pants/targets/python_requirement.py
@@ -17,25 +17,65 @@
__author__ = 'Brian Wickman'
from pkg_resources import Requirement
+
from twitter.pants.base import Target
from .external_dependency import ExternalDependency
+
class PythonRequirement(Target, ExternalDependency):
"""Pants wrapper around pkg_resources.Requirement"""
- def __init__(self, requirement, dynamic=False, repository=None, name=None, version_filter=None):
+ def __init__(self, requirement, repository=None, version_filter=None, use_2to3=False,
+ compatibility=None):
self._requirement = Requirement.parse(requirement)
- self._name = name or self._requirement.project_name
- self._dynamic = dynamic
self._repository = repository
- self._version_filter = version_filter or (lambda: True)
- Target.__init__(self, self._name)
+ self._use_2to3 = use_2to3
+ self._version_filter = version_filter or (lambda py, pl: True)
+  # TODO(wickman) Unify this with PythonTarget.compatibility
+ self.compatibility = compatibility or ['']
+ Target.__init__(self, self.project_name)
+
+ def should_build(self, python, platform):
+ return self._version_filter(python, platform)
+
+ @property
+ def use_2to3(self):
+ return self._use_2to3
+
+ @property
+ def repository(self):
+ return self._repository
+
+ # duck-typing Requirement interface for Resolver, since Requirement cannot be
+ # subclassed (curses!)
+ @property
+ def key(self):
+ return self._requirement.key
+
+ @property
+ def extras(self):
+ return self._requirement.extras
+
+ @property
+ def specs(self):
+ return self._requirement.specs
+
+ @property
+ def project_name(self):
+ return self._requirement.project_name
+
+ @property
+ def requirement(self):
+ return self._requirement
+
+ def __eq__(self, other):
+ return isinstance(other, PythonRequirement) and self._requirement == other._requirement
- def size(self):
- return 1
+ def __hash__(self):
+ return hash(self._requirement)
- def should_build(self):
- return self._version_filter()
+ def __contains__(self, item):
+ return item in self._requirement
def cache_key(self):
return str(self._requirement)
diff --git a/src/python/twitter/pants/targets/python_target.py b/src/python/twitter/pants/targets/python_target.py
index 022f861..43c3a6d 100644
--- a/src/python/twitter/pants/targets/python_target.py
+++ b/src/python/twitter/pants/targets/python_target.py
@@ -15,20 +15,66 @@
# ==================================================================================================
from twitter.common.collections import OrderedSet
+from twitter.common.python.interpreter import PythonIdentity
from twitter.pants.base import Target
+from twitter.pants.base.target import TargetDefinitionException
from twitter.pants.targets.with_sources import TargetWithSources
+
class PythonTarget(TargetWithSources):
- def __init__(self, name, sources, resources=None, dependencies=None, provides=None):
- TargetWithSources.__init__(self, name, sources=sources)
+ def __init__(self,
+ name,
+ sources,
+ resources=None,
+ dependencies=None,
+ provides=None,
+ compatibility=None):
+ """
+ Python Target base class.
+
+ Additional arguments:
+ provides:
+ A PythonArtifact that this PythonTarget should export if used with 'setup_py'.
+ See the documentation of PythonArtifact for more information.
+
+ compatibility:
+ A list of filters indicating interpreter compatibility of this
+ target. The default, None, means that this target works with
+ any interpreter class.
+
+ Examples:
+ # Specified without interpreter sub-version
+ ['>=2.6,<3']
+          # Specified with interpreter sub-version
+ ['CPython>=2.6.5,<3', 'PyPy>=2.7', 'Jython>=2.6']
+
+ In general, Pants will try to use the least common denominator
+ interpreter constructed from the transitive closure of all targets
+ underneath the target root (PythonBinary, PythonTest).
+
+ It can be further constrained by --python-setup-interpreter
+ """
+ TargetWithSources.__init__(self, name, sources=sources)
self.resources = self._resolve_paths(self.target_base, resources) if resources else OrderedSet()
self.dependencies = OrderedSet(dependencies) if dependencies else OrderedSet()
self.provides = provides
+ self.compatibility = compatibility or ['']
+ for req in self.compatibility:
+ try:
+ PythonIdentity.parse_requirement(req)
+ except ValueError as e:
+ raise TargetDefinitionException(str(e))
if self.provides:
self.provides.library = self
- def _walk(self, walked, work, predicate = None):
+ def reify(self):
+ reified_dependencies = OrderedSet()
+ for dependency in self.dependencies:
+ reified_dependencies.update(dependency.resolve())
+ self.dependencies = reified_dependencies
+
+ def _walk(self, walked, work, predicate=None):
Target._walk(self, walked, work, predicate)
for dependency in self.dependencies:
for dep in dependency.resolve():
diff --git a/src/python/twitter/pants/targets/python_thrift_library.py b/src/python/twitter/pants/targets/python_thrift_library.py
index 7144272..72cbdbb 100644
--- a/src/python/twitter/pants/targets/python_thrift_library.py
+++ b/src/python/twitter/pants/targets/python_thrift_library.py
@@ -14,8 +14,6 @@
# limitations under the License.
# ==================================================================================================
-from twitter.common.collections import OrderedSet
-from .python_requirement import PythonRequirement
from .python_target import PythonTarget
diff --git a/src/python/twitter/pants/targets/with_sources.py b/src/python/twitter/pants/targets/with_sources.py
index f360d49..553be38 100644
--- a/src/python/twitter/pants/targets/with_sources.py
+++ b/src/python/twitter/pants/targets/with_sources.py
@@ -106,5 +106,7 @@ class TargetWithSources(Target):
os.path.join(get_buildroot(), self.target_base))
resolve_basepath = os.path.join(get_buildroot(), rel_base, src_relpath)
- with pushd(resolve_basepath):
- return [ os.path.normpath(os.path.join(src_relpath, path)) for path in flatten_paths(paths) ]
+ if os.path.exists(resolve_basepath):
+ with pushd(resolve_basepath):
+ return [os.path.normpath(os.path.join(src_relpath, path)) for path in flatten_paths(paths)]
+ return []
diff --git a/src/python/twitter/pants/tasks/__init__.py b/src/python/twitter/pants/tasks/__init__.py
index d702bf9..e622386 100644
--- a/src/python/twitter/pants/tasks/__init__.py
+++ b/src/python/twitter/pants/tasks/__init__.py
@@ -49,6 +49,8 @@ class Task(object):
"""
Executes this task against the given targets which may be a subset of the current context
targets.
+
+ Can rewrite the dependency graph for downstream tasks.
"""
def invalidate_for(self):
@@ -58,6 +60,17 @@ class Task(object):
between runs all targets will be invalidated.
"""
+ def prepare(self):
+ """
+    Subclass to rewrite the dependency graph for upstream tasks (the dual of execute()).
+
+ During an execution of a pants run, a proper topological ordering of goals is computed.
+ This method is called on each task in reverse topological order, then execute is called
+ on each task in topological order. Use this method to inject upstream dependencies, e.g.
+ inject PythonRequirement("pylint") on PythonTest target dependencies if the PyLint Task
+ has been specified.
+ """
+
class CacheManager(object):
"""
Manages cache checks, updates and invalidation keeping track of basic change and invalidation
@@ -153,7 +166,7 @@ class Task(object):
check = self.invalidate_for()
if check is not None:
- with safe_open(self._extradata, 'w') as pickled:
+ with safe_open(self._extradata, 'wb') as pickled:
pickle.dump(check, pickled)
cache_key = cache_manager.check_content(Task.EXTRA_DATA, [self._extradata])
@@ -192,7 +205,7 @@ class Task(object):
))
try:
- yield cache_manager.changed.keys()
+ yield list(cache_manager.changed.keys())
for cache_keys in cache_manager.changed.values():
for cache_key in cache_keys:
cache_manager.update(cache_key)
diff --git a/src/python/twitter/pants/tasks/checkstyle.py b/src/python/twitter/pants/tasks/checkstyle.py
index 454099f..6e464d6 100644
--- a/src/python/twitter/pants/tasks/checkstyle.py
+++ b/src/python/twitter/pants/tasks/checkstyle.py
@@ -52,7 +52,7 @@ class Checkstyle(NailgunTask):
def execute(self, targets):
if not self.context.options.checkstyle_skip:
- with self.changed(filter(Checkstyle._is_checked, targets)) as changed_targets:
+ with self.changed(list(filter(Checkstyle._is_checked, targets))) as changed_targets:
sources = self.calculate_sources(changed_targets)
if sources:
result = self.checkstyle(sources)
@@ -68,7 +68,7 @@ class Checkstyle(NailgunTask):
def checkstyle(self, sources):
classpath = self.profile_classpath(self._profile)
- with self.context.state('classpath', []) as cp:
+ with self.context.state.list('classpath') as cp:
classpath.extend(jar for conf, jar in cp if conf in self._confs)
opts = [
diff --git a/src/python/twitter/pants/tasks/code_gen.py b/src/python/twitter/pants/tasks/code_gen.py
index 9bb2d51..1a5d554 100644
--- a/src/python/twitter/pants/tasks/code_gen.py
+++ b/src/python/twitter/pants/tasks/code_gen.py
@@ -21,6 +21,7 @@ from collections import defaultdict
from twitter.pants import get_buildroot
from twitter.pants.tasks import Task
+
class CodeGen(Task):
"""
Encapsulates the common machinery for codegen targets that support multiple output languages.
@@ -76,6 +77,10 @@ class CodeGen(Task):
target.dependencies.add(dependency)
def execute(self, targets):
+ if any(self.is_gentarget(tgt) for tgt in targets):
+ self.context.log.debug('CodeGen task got usable targets:')
+ for t in filter(self.is_gentarget, targets):
+ self.context.log.debug(' %s' % t)
gentargets = [t for t in targets if self.is_gentarget(t)]
capabilities = self.genlangs() # lang_name => predicate
gentargets_by_dependee = self.context.dependants(
@@ -109,12 +114,17 @@ class CodeGen(Task):
for lang, tgts in gentargets_bylang.items():
lang_changed = changed.intersection(tgts)
if lang_changed:
+ self.context.log.debug('Running code generation due to changed %r target: %r' % (
+ lang, lang_changed))
self.genlang(lang, lang_changed)
# Link synthetic targets for all in-play gen targets
for lang, tgts in gentargets_bylang.items():
if tgts:
langtarget_by_gentarget = {}
+ # TODO(wickman) createtarget should be able to return a list of targets.
+ # alternately, return the meta-target that resolves to the list -- but this
+ # complicates things.
for target in tgts:
langtarget_by_gentarget[target] = self.createtarget(
lang,
@@ -130,4 +140,5 @@ class CodeGen(Task):
genmap.add(gentarget, get_buildroot(), [langtarget])
for dep in self.getdependencies(gentarget):
if self.is_gentarget(dep):
+      # TODO(wickman) This seems to be broken in the Python model?
self.updatedependencies(langtarget, langtarget_by_gentarget[dep])
diff --git a/src/python/twitter/pants/tasks/console_task.py b/src/python/twitter/pants/tasks/console_task.py
index 1ad2dc3..698b37a 100644
--- a/src/python/twitter/pants/tasks/console_task.py
+++ b/src/python/twitter/pants/tasks/console_task.py
@@ -29,7 +29,8 @@ class ConsoleTask(Task):
def __init__(self, context, outstream=sys.stdout):
Task.__init__(self, context)
separator_option = "console_%s_separator" % self.__class__.__name__
- self._console_separator = getattr(context.options, separator_option).decode('string-escape')
+ console_separator = getattr(context.options, separator_option).encode('utf8')
+ self._console_separator = console_separator.decode('unicode-escape')
self._outstream = outstream
def execute(self, targets):
diff --git a/src/python/twitter/pants/tasks/eclipse_gen.py b/src/python/twitter/pants/tasks/eclipse_gen.py
index 1765de1..383fe27 100644
--- a/src/python/twitter/pants/tasks/eclipse_gen.py
+++ b/src/python/twitter/pants/tasks/eclipse_gen.py
@@ -124,7 +124,7 @@ class EclipseGen(IdeGen):
),
has_python=project.has_python,
has_scala=project.has_scala and not project.skip_scala,
- source_bases=source_bases.items(),
+ source_bases=list(source_bases.items()),
pythonpaths=pythonpaths,
debug_port=project.debug_port,
)
diff --git a/src/python/twitter/pants/tasks/filter.py b/src/python/twitter/pants/tasks/filter.py
index fff339b..888a132 100644
--- a/src/python/twitter/pants/tasks/filter.py
+++ b/src/python/twitter/pants/tasks/filter.py
@@ -43,9 +43,9 @@ def _extract_modifier(value):
def _create_filters(list_option, predicate):
for value in list_option:
modifier, value = _extract_modifier(value)
- predicates = map(predicate, value.split(','))
+ predicates = [predicate(v) for v in value.split(',')]
def filter(target):
- return modifier(any(map(lambda predicate: predicate(target), predicates)))
+ return modifier(any(predicate(target) for predicate in predicates))
yield filter
diff --git a/src/python/twitter/pants/tasks/ide_gen.py b/src/python/twitter/pants/tasks/ide_gen.py
index 0cd5674..7f78246 100644
--- a/src/python/twitter/pants/tasks/ide_gen.py
+++ b/src/python/twitter/pants/tasks/ide_gen.py
@@ -453,7 +453,8 @@ class Project(object):
def is_sibling(target):
return source_target(target) and target_dirset.intersection(find_source_basedirs(target))
- return filter(is_sibling, [ Target.get(a) for a in candidates if a != target.address ])
+ return list(filter(is_sibling,
+ [Target.get(a) for a in candidates if a != target.address]))
for target in self.targets:
target.walk(configure_target, predicate = source_target)
diff --git a/src/python/twitter/pants/tasks/ivy_resolve.py b/src/python/twitter/pants/tasks/ivy_resolve.py
index 68dfef4..3fba00f 100644
--- a/src/python/twitter/pants/tasks/ivy_resolve.py
+++ b/src/python/twitter/pants/tasks/ivy_resolve.py
@@ -146,7 +146,7 @@ class IvyResolve(NailgunTask):
"""Where we put the classpath file for this set of targets."""
sha = hashlib.sha1()
for t in targets:
- sha.update(t.id)
+ sha.update(t.id.encode('utf8'))
return sha.hexdigest()
target_workdir = os.path.join(self._work_dir, dirname_for_requested_targets(targets))
@@ -174,7 +174,7 @@ class IvyResolve(NailgunTask):
safe_link(target_ivyxml, ivyxml_symlink)
with self._cachepath(self._classpath_file) as classpath:
- with self.context.state('classpath', []) as cp:
+ with self.context.state.list('classpath') as cp:
for path in classpath:
if self._map_jar(path):
for conf in self._confs:
diff --git a/src/python/twitter/pants/tasks/jar_publish.py b/src/python/twitter/pants/tasks/jar_publish.py
index 878922f..be22193 100644
--- a/src/python/twitter/pants/tasks/jar_publish.py
+++ b/src/python/twitter/pants/tasks/jar_publish.py
@@ -29,6 +29,7 @@ from collections import defaultdict
from twitter.common.collections import OrderedDict, OrderedSet
from twitter.common.config import Properties
from twitter.common.dirutil import safe_open, safe_rmtree
+from twitter.common.lang import Compatibility
from twitter.pants import (
binary_util,
@@ -48,6 +49,7 @@ from twitter.pants.targets import (
JavaThriftLibrary)
from twitter.pants.tasks import Task, TaskError
+
class Semver(object):
@staticmethod
def parse(version):
@@ -83,10 +85,7 @@ class Semver(object):
('%s-SNAPSHOT' % self.patch) if self.snapshot else self.patch
)
- def __eq__(self, other):
- return self.__cmp__(other) == 0
-
- def __cmp__(self, other):
+ def _compare(self, other):
diff = self.major - other.major
if not diff:
diff = self.minor - other.minor
@@ -101,6 +100,15 @@ class Semver(object):
diff = 0
return diff
+ def __lt__(self, other):
+ return self._compare(other) < 0
+
+ def __gt__(self, other):
+ return self._compare(other) > 0
+
+ def __eq__(self, other):
+ return self._compare(other) == 0
+
def __repr__(self):
return 'Semver(%s)' % self.version()
@@ -196,7 +204,7 @@ class DependencyWriter(object):
if jar.rev:
dependencies[(jar.org, jar.name)] = self.jardep(jar)
target_jar = self.internaldep(as_jar(target, is_tgt=True)).extend(
- dependencies=dependencies.values()
+ dependencies=list(dependencies.values())
)
template_kwargs = self.templateargs(target_jar, confs, synth)
@@ -391,7 +399,7 @@ class JarPublish(Task):
for repo, data in self.repos.items():
auth = data.get('auth')
if auth:
- credentials = context.resolve(auth).next()
+ credentials = next(context.resolve(auth))
user = credentials.username()
password = credentials.password()
self.context.log.debug('Found auth for repo: %s %s:%s' % (repo, user, password))
@@ -600,7 +608,8 @@ class JarPublish(Task):
print('\nChanges for %s since %s @ %s:\n\n%s' % (
coordinate(jar.org, jar.name), semver.version(), sha, changelog
))
- push = raw_input('Publish %s with revision %s ? [y|N] ' % (
+ input_func = input if Compatibility.PY3 else raw_input
+ push = input_func('Publish %s with revision %s ? [y|N] ' % (
coordinate(jar.org, jar.name), newver.version()
))
print('\n')
@@ -673,7 +682,7 @@ class JarPublish(Task):
self.commit_push(jar.org, jar.name, newver.version(), head_sha)
def check_targets(self, targets):
- invalid = filter(lambda (t, reason): reason, zip(targets, map(self.is_invalid, targets)))
+ invalid = [pr for pr in zip(targets, map(self.is_invalid, targets)) if pr[1]]
if invalid:
target_reasons = '\n\t'.join('%s: %s' % (tgt.address, reason) for tgt, reason in invalid)
params = dict(
diff --git a/src/python/twitter/pants/tasks/java_compile.py b/src/python/twitter/pants/tasks/java_compile.py
index 727b5ee..1291136 100644
--- a/src/python/twitter/pants/tasks/java_compile.py
+++ b/src/python/twitter/pants/tasks/java_compile.py
@@ -16,6 +16,7 @@
from collections import defaultdict
+from functools import reduce
import os
import shlex
@@ -54,7 +55,7 @@ _JMAKE_ERROR_CODES = {
-30: 'internal Java error (caused by java.lang.RuntimeException).'
}
# When executed via a subprocess return codes will be treated as unsigned
-_JMAKE_ERROR_CODES.update((256+code, msg) for code, msg in _JMAKE_ERROR_CODES.items())
+_JMAKE_ERROR_CODES.update((256+code, msg) for code, msg in _JMAKE_ERROR_CODES.copy().items())
def _is_java(target):
@@ -104,9 +105,9 @@ class JavaCompile(NailgunTask):
self._confs = context.config.getlist('java-compile', 'confs')
def execute(self, targets):
- java_targets = filter(_is_java, targets)
+ java_targets = list(filter(_is_java, targets))
if java_targets:
- with self.context.state('classpath', []) as cp:
+ with self.context.state.list('classpath') as cp:
for conf in self._confs:
cp.insert(0, (conf, self._classes_dir))
diff --git a/src/python/twitter/pants/tasks/javadoc_gen.py b/src/python/twitter/pants/tasks/javadoc_gen.py
index ec3ad77..2bbf2cb 100644
--- a/src/python/twitter/pants/tasks/javadoc_gen.py
+++ b/src/python/twitter/pants/tasks/javadoc_gen.py
@@ -79,9 +79,9 @@ class JavadocGen(Task):
if catalog and self.combined:
raise TaskError('Cannot provide javadoc target mappings for combined output')
- with self.changed(filter(is_java, targets)) as changed_targets:
+ with self.changed(list(filter(is_java, targets))) as changed_targets:
safe_mkdir(self._output_dir)
- with self.context.state('classpath', []) as cp:
+ with self.context.state.list('classpath') as cp:
classpath = [jar for conf, jar in cp if conf in self.confs]
def find_javadoc_targets():
diff --git a/src/python/twitter/pants/tasks/junit_run.py b/src/python/twitter/pants/tasks/junit_run.py
index 8c356ec..16dcd97 100644
--- a/src/python/twitter/pants/tasks/junit_run.py
+++ b/src/python/twitter/pants/tasks/junit_run.py
@@ -40,7 +40,7 @@ class JUnitRun(JvmTask):
action="callback", callback=mkflag.set_bool, default=False,
help = "[%default] Fail fast on the first test failure in a suite")
- option_group.add_option(mkflag("batch-size"), type = "int", default=sys.maxint,
+ option_group.add_option(mkflag("batch-size"), type = "int", default=sys.maxsize,
dest = "junit_run_batch_size",
help = "[ALL] Runs at most this many tests in a single test process.")
@@ -180,7 +180,7 @@ class JUnitRun(JvmTask):
def _partition(self, tests):
stride = min(self.batch_size, len(tests))
- for i in xrange(0, len(tests), stride):
+ for i in range(0, len(tests), stride):
yield tests[i:i+stride]
def execute(self, targets):
diff --git a/src/python/twitter/pants/tasks/jvm_binary_task.py b/src/python/twitter/pants/tasks/jvm_binary_task.py
index 65ef01a..bb1bd6c 100644
--- a/src/python/twitter/pants/tasks/jvm_binary_task.py
+++ b/src/python/twitter/pants/tasks/jvm_binary_task.py
@@ -79,7 +79,7 @@ class JvmBinaryTask(Task):
classpath_entry = {}
externaljars[(org, name)] = classpath_entry
classpath_entry[conf] = os.path.join(basedir, externaljar)
- return externaljars.values()
+ return list(externaljars.values())
def _unexcluded_dependencies(self, jardepmap, binary):
# TODO(John Sirois): Kill this and move jar exclusion to use confs
diff --git a/src/python/twitter/pants/tasks/jvm_run.py b/src/python/twitter/pants/tasks/jvm_run.py
index 28f0e53..3fa0e9d 100644
--- a/src/python/twitter/pants/tasks/jvm_run.py
+++ b/src/python/twitter/pants/tasks/jvm_run.py
@@ -70,7 +70,7 @@ class JvmRun(JvmTask):
self.context.lock.release()
# Run the first target that is a binary.
- binaries = filter(is_binary, targets)
+ binaries = list(filter(is_binary, targets))
if len(binaries) > 0: # We only run the first one.
main = binaries[0].main
result = runjava_indivisible(
diff --git a/src/python/twitter/pants/tasks/jvm_task.py b/src/python/twitter/pants/tasks/jvm_task.py
index e220c50..7876480 100644
--- a/src/python/twitter/pants/tasks/jvm_task.py
+++ b/src/python/twitter/pants/tasks/jvm_task.py
@@ -25,7 +25,7 @@ from twitter.pants.tasks import Task
class JvmTask(Task):
def classpath(self, cp=None, confs=None):
classpath = cp or []
- with self.context.state('classpath', []) as cp:
+ with self.context.state.list('classpath') as cp:
classpath.extend(path for conf, path in cp if not confs or conf in confs)
if self.context.config.getbool('jvm', 'parallel_test_paths', default=False):
diff --git a/src/python/twitter/pants/tasks/prepare_resources.py b/src/python/twitter/pants/tasks/prepare_resources.py
index 1f49d56..4041b48 100644
--- a/src/python/twitter/pants/tasks/prepare_resources.py
+++ b/src/python/twitter/pants/tasks/prepare_resources.py
@@ -57,7 +57,7 @@ class PrepareResources(Task):
)
genmap = self.context.products.get('resources')
- with self.context.state('classpath', []) as cp:
+ with self.context.state.list('classpath') as cp:
for resources in all_resources:
resources_dir = target_dir(resources)
genmap.add(resources, resources_dir, resources.sources)
diff --git a/src/python/twitter/pants/tasks/python/chroot.py b/src/python/twitter/pants/tasks/python/chroot.py
new file mode 100644
index 0000000..8d4ec4b
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/chroot.py
@@ -0,0 +1,92 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from __future__ import print_function
+
+__author__ = 'Brian Wickman'
+
+import os
+import tempfile
+
+from twitter.common.dirutil import safe_mkdir, register_rmtree
+from twitter.common.python.interpreter import PythonIdentity
+from twitter.common.python.pex_builder import PEXBuilder
+from twitter.common.python.platforms import Platform
+from twitter.pants.targets import PythonRequirement, PythonTarget
+from twitter.pants.tasks import Task
+
+from .target import is_python_root
+
+
+class PythonChroot(Task):
+ @classmethod
+ def setup_parser(cls, option_group, args, mkflag):
+ option_group.add_option(mkflag("multi"), dest="python_chroot_multi",
+ default=False, action='store_true',
+                             help="Create multi-interpreter chroots rather than one chroot per interpreter.")
+ option_group.add_option(mkflag("save"), dest="python_chroot_save",
+ default=False, action='store_true',
+ help="Do not delete the chroots after creating them.")
+
+ def __init__(self, context):
+ super(PythonChroot, self).__init__(context)
+
+ def _dump_sources(self, builder, library):
+ def copy_to_chroot(base, path, add_function):
+ src = os.path.join(base, path)
+ add_function(src, path)
+ self.context.log.debug(' Dumping library: %s' % library)
+ for filename in library.sources:
+ copy_to_chroot(library.target_base, filename, builder.add_source)
+ for filename in library.resources:
+ copy_to_chroot(library.target_base, filename, builder.add_resource)
+
+ def _dump_requirement(self, builder, req, dynamic, repo):
+ self.context.log.debug(' Dumping requirement: %s%s%s' % (str(req),
+ ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else ''))
+ builder.add_requirement(req, dynamic, repo)
+
+ def _dump_distribution(self, builder, dist):
+ self.context.log.debug(' Dumping distribution: %s' % os.path.basename(dist.location))
+ builder.add_distribution(dist)
+
+ def build(self, path, interpreter, closure, distributions):
+ builder = PEXBuilder(path, interpreter=interpreter)
+ for target in closure:
+ if isinstance(target, PythonTarget):
+ self._dump_sources(builder, target)
+ elif isinstance(target, PythonRequirement):
+ # TODO(wickman) upstream compatibility filter
+ if not target.should_build(interpreter.python, Platform.current()):
+ self.context.log.debug('Skipping %s based upon version filter' % target)
+ continue
+ self._dump_requirement(builder, target.requirement, False, target.repository)
+ for dist in distributions:
+ self._dump_distribution(builder, dist)
+ return builder
+
+ def execute(self, targets):
+ for target in self.context.targets(is_python_root):
+ for interpreter in target.interpreters:
+ path = os.path.join(self.context.config.getdefault('pants_distdir'),
+ interpreter.version_string,
+ target.name)
+ if not self.context.options.python_chroot_save:
+ register_rmtree(path)
+ safe_mkdir(path)
+ self.context.log.info('Building chroot %s => %s' % (target.name, path))
+ target.chroots[interpreter] = self.build(
+ path, interpreter, target.closure(), target.distributions[interpreter])
diff --git a/src/python/twitter/pants/tasks/python/chroot_task.py b/src/python/twitter/pants/tasks/python/chroot_task.py
new file mode 100644
index 0000000..31a7edc
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/chroot_task.py
@@ -0,0 +1,59 @@
+# ==================================================================================================
+# Copyright 2013 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from twitter.common.collections import OrderedSet
+
+from twitter.pants.targets import PythonBinary
+from twitter.pants.tasks import Task, TaskError
+
+from .target import is_python_root
+
+
+class ChrootTask(Task):
+ def sources(self, closure):
+ sources = OrderedSet()
+ for target in closure:
+ target.walk(lambda trg: sources.update(getattr(trg, 'sources', []) or []))
+ return list(sources)
+
+ def iter_roots(self):
+ return list(self.context.targets(is_python_root))
+
+ def iter_chroots(self):
+ for target in self.iter_roots():
+ for interpreter in target.interpreters:
+ yield target, interpreter, target.chroots[interpreter]
+
+ def select_chroot(self):
+ roots = self.iter_roots()
+ if len(roots) == 0:
+ raise TaskError('No chrootable targets specified!')
+ elif len(roots) > 1:
+ raise TaskError('Ambiguous chroot: multiple targets specified!')
+ target = roots[0]
+ if len(target.interpreters) == 0:
+ raise TaskError('No interpreters available for %s' % target)
+ elif len(target.interpreters) > 1:
+ raise TaskError('Ambiguous chroot: multiple interpreters specified for %s!' % target)
+ interpreter = target.interpreters[0]
+ return target.chroots[interpreter], interpreter
+
+ def select_binary(self):
+ target = self.iter_roots()[0]
+ binary_targets = target.select(PythonBinary)
+ if len(binary_targets) != 1:
+ raise TaskError('Ambiguous chroot: Should be exactly one binary target specified!')
+ return binary_targets.pop()
diff --git a/src/python/twitter/pants/tasks/python/compile.py b/src/python/twitter/pants/tasks/python/compile.py
new file mode 100644
index 0000000..4a2a4d1
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/compile.py
@@ -0,0 +1,22 @@
+# ==================================================================================================
+# Copyright 2013 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from twitter.pants.tasks import Task
+
+
+class PythonCompile(Task):
+ def execute(self, _):
+ pass
diff --git a/src/python/twitter/pants/tasks/python/establish_roots.py b/src/python/twitter/pants/tasks/python/establish_roots.py
new file mode 100644
index 0000000..292982a
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/establish_roots.py
@@ -0,0 +1,100 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from twitter.common.collections import OrderedSet
+
+from twitter.pants import is_concrete
+from twitter.pants.base import ParseContext
+from twitter.pants.tasks import Task
+from twitter.pants.targets import (
+ JarLibrary,
+ PythonTests,
+ PythonTestSuite,
+ PythonBinary,
+ PythonLibrary,
+ PythonRequirement
+)
+
+from .target import PythonRoot
+
+
+def is_python_root(target):
+ # return isinstance(target, PythonTests) or isinstance(target, PythonBinary)
+ return isinstance(target, (
+ PythonLibrary,
+ PythonTests,
+ PythonBinary,
+ PythonRequirement
+ ))
+
+
+class EstablishRoots(Task):
+ @classmethod
+ def setup_parser(cls, option_group, args, mkflag):
+ option_group.add_option(mkflag("union"), mkflag("union", negate=True),
+ dest="establish_roots_unioned", default=False,
+ action="callback", callback=mkflag.set_bool,
+ help="[%default] Run downstream python tasks with the union of "
+ "all targets.")
+ option_group.add_option(mkflag("empty"), dest="establish_roots_empty", default=False, action='store_true',
+ help="[%default] Establish an empty chroot if no targets specified. "
+ "Useful in conjunction with --resolve-eggs-req")
+ option_group.add_option(mkflag("union-name"),
+ dest="establish_roots_union_name", default=None,
+ help="The name of the synthetic unioned target. By default it will "
+ "be the first target name appended with '-union'.")
+
+ # TODO(wickman) Standardize meta-target unwrapping
+ @classmethod
+ def unwrap(cls, target, visited):
+ if target in visited:
+ return
+ visited.add(target)
+ if isinstance(target, PythonTestSuite):
+ for dependency in target.dependencies:
+ for t in cls.unwrap(dependency, visited):
+ yield t
+ elif isinstance(target, JarLibrary):
+ for dependency in target.resolve():
+ for t in cls.unwrap(dependency, visited):
+ yield t
+ else:
+ yield target
+
+ def walk_possible_roots(self):
+ visited = set()
+ for target in self.context.target_roots:
+ for child in filter(None, self.unwrap(target, visited)):
+ yield child
+
+ def roots(self):
+ return OrderedSet(filter(is_python_root, self.walk_possible_roots()))
+
+ def execute(self, _):
+ all_roots = self.roots()
+ if self.context.options.establish_roots_unioned:
+ union = PythonRoot.union(all_roots)
+ self.context.log.debug('Adding synthetic union root for %d targets: %s' % (
+ len(all_roots), union))
+ self.context.add_target(union)
+ else:
+ for target in all_roots:
+ self.context.log.debug('Adding root for %s' % target)
+ self.context.add_target(PythonRoot.of(target))
+    if not all_roots:
+ if self.context.options.establish_roots_empty:
+ self.context.log.debug('Adding empty synthetic root.')
+ self.context.add_target(PythonRoot.union(all_roots))
diff --git a/src/python/twitter/pants/tasks/python/lint.py b/src/python/twitter/pants/tasks/python/lint.py
new file mode 100644
index 0000000..dd52559
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/lint.py
@@ -0,0 +1,46 @@
+# ==================================================================================================
+# Copyright 2013 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from twitter.common.python.pex import PEX
+
+from twitter.pants.base import ParseContext
+from twitter.pants.targets import PythonRequirement, PythonTarget
+
+from .chroot_task import ChrootTask
+
+
+class PythonLintTask(ChrootTask):
+ def pylint(self):
+ with ParseContext.temp():
+ return PythonRequirement('pylint', use_2to3=True)
+
+ def prepare(self):
+    self.context.log.debug('Pylint updating upstream dependencies.')
+ for target in self.context.targets():
+ # if isinstance(target, PythonRoot)
+ if isinstance(target, PythonTarget):
+ target.dependencies.add(self.pylint())
+
+ def execute(self, _):
+ pylintrc = self.context.config.get('python-lint', 'pylintrc')
+ for target, interpreter, chroot in self.iter_chroots():
+ clone = chroot.clone()
+ clone.set_entry_point('pylint.lint')
+ clone.freeze()
+ pex = PEX(pex=clone.path(), interpreter=interpreter)
+ args = ['--rcfile=%s' % pylintrc] if pylintrc else []
+ pex.run(args=args + self.sources(target.closure()), with_chroot=True)
+ clone.chroot().delete()
diff --git a/src/python/twitter/pants/tasks/python/pex.py b/src/python/twitter/pants/tasks/python/pex.py
new file mode 100644
index 0000000..fcdc0df
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/pex.py
@@ -0,0 +1,57 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+import os
+
+from twitter.common.collections import OrderedSet
+
+from twitter.pants.targets import PythonBinary
+from twitter.pants.tasks import Task, TaskError
+
+from .target import is_python_root
+
+
+class CreatePex(Task):
+ def select_entry_point(self, binary_target):
+ if binary_target.entry_point:
+ return binary_target.entry_point
+ if binary_target.sources:
+ assert len(binary_target.sources) == 1
+ return os.path.splitext(binary_target.sources[0])[0].replace(os.path.sep, '.')
+
+ def execute(self, _):
+ for target in self.context.targets(is_python_root):
+ binaries = target.select(PythonBinary)
+ if len(binaries) == 0:
+ continue
+ elif len(binaries) > 1:
+ raise TaskError('Cannot build %s because it contains multiple entry points: %s' % (
+ target, ' '.join(binary.name for binary in binaries)))
+
+ entry_point = self.select_entry_point(list(binaries)[0])
+ extended_names = len(target.interpreters) > 1
+
+
+ for interpreter in target.interpreters:
+ builder = target.chroots[interpreter]
+ builder = builder.clone()
+ builder.set_entry_point(entry_point or '')
+ pex_name = '%s.pex' % target.name if not extended_names else '%s_%s.pex' % (
+ target.name, '%d%d' % interpreter.version[0:2])
+ pex = os.path.join(self.context.config.getdefault('pants_distdir'), pex_name)
+ self.context.log.info('Building %s' % pex)
+ builder.build(pex)
+ builder.chroot().delete()
diff --git a/src/python/twitter/pants/tasks/python/pytest_run.py b/src/python/twitter/pants/tasks/python/pytest_run.py
new file mode 100644
index 0000000..0151c8e
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/pytest_run.py
@@ -0,0 +1,244 @@
+# ==================================================================================================
+# Copyright 2013 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from __future__ import print_function
+
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
+from collections import defaultdict
+import errno
+import os
+import signal
+import sys
+import time
+
+from twitter.common.dirutil import safe_mkdir
+from twitter.common.lang import Compatibility
+from twitter.common.python.pex import PEX
+from twitter.common.quantity import Amount, Time
+
+from twitter.pants.base import ParseContext
+from twitter.pants.targets import (
+ PythonRequirement,
+ PythonTests)
+
+from .chroot_task import ChrootTask
+
+
+class PythonTestResult(object):
+ @staticmethod
+ def timeout():
+ return PythonTestResult('TIMEOUT')
+
+ @staticmethod
+ def exception():
+ return PythonTestResult('EXCEPTION')
+
+ @staticmethod
+ def rc(value):
+ return PythonTestResult('SUCCESS' if value == 0 else 'FAILURE',
+ rc=value)
+
+ def __init__(self, msg, rc=None):
+ self._rc = rc
+ self._msg = msg
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and (
+ self._rc == other._rc and self._msg == other._msg)
+
+ def __str__(self):
+ return self._msg
+
+ @property
+ def success(self):
+ return self._rc == 0
+
+
+DEFAULT_COVERAGE_CONFIG = """
+[run]
+branch = True
+timid = True
+
+[report]
+exclude_lines =
+ def __repr__
+ raise NotImplementedError
+
+ignore_errors = True
+"""
+
+
+
+class PytestRun(ChrootTask):
+ DEPENDENCIES = []
+
+ # TODO(wickman) Expose these as configurable parameters
+ TEST_TIMEOUT = Amount(2, Time.MINUTES)
+ TEST_POLL_PERIOD = Amount(100, Time.MILLISECONDS)
+
+ @classmethod
+ def setup_parser(cls, option_group, args, mkflag):
+ option_group.add_option(mkflag("junit-xml-base"), dest = "python_junit_xml_base",
+ default=None,
+ help="Generate JUnit XML output into this directory.")
+
+ option_group.add_option(mkflag("failsoft"), dest = "python_test_failsoft",
+ default=False, action="store_true",
+ help="Run all tests despite failures. Default is to stop at first failure.")
+
+ option_group.add_option(mkflag("coverage"), dest = "python_test_coverage",
+ default=False, action="store_true",
+ help="Create coverage report from tests.")
+
+ def test_dependencies(self):
+ if not self.DEPENDENCIES:
+ with ParseContext.temp():
+ self.DEPENDENCIES = [
+ PythonRequirement('pytest'),
+ PythonRequirement('pytest-cov'),
+ PythonRequirement('coverage'),
+ PythonRequirement('unittest2', version_filter=lambda py, pl: py.startswith('2')),
+ PythonRequirement('unittest2py3k', version_filter=lambda py, pl: py.startswith('3'))
+ ]
+ return self.DEPENDENCIES
+
+ def prepare(self):
+ for target in self.context.targets():
+ if isinstance(target, PythonTests):
+ target.dependencies.update(self.test_dependencies())
+
+ def junit_args(self, target):
+ args = []
+ xml_base = self.context.options.python_junit_xml_base
+ if xml_base:
+ xml_base = os.path.abspath(os.path.normpath(xml_base))
+ xml_path = os.path.join(
+ xml_base, os.path.dirname(target.address.buildfile.relpath), target.name + '.xml')
+ safe_mkdir(os.path.dirname(xml_path))
+ args.append('--junitxml=%s' % xml_path)
+ return args
+
+ def coverage_config(self, target):
+ cp = configparser.ConfigParser()
+ cp.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
+ cp.add_section('html')
+ target_dir = os.path.join(self.context.config.getdefault('pants_distdir'), 'coverage',
+ os.path.dirname(target.address.buildfile.relpath), target.name)
+ safe_mkdir(target_dir)
+ cp.set('html', 'directory', target_dir)
+ return cp, target_dir
+
+ def coverage_args(self, target):
+ cp, target_dir = self.coverage_config(target)
+ with open(os.path.join(target_dir, '.coveragerc'), 'w') as fp:
+ cp.write(fp)
+ filename = fp.name
+ if target.coverage:
+ source = target.coverage
+ else:
+ # This technically makes the assumption that tests/python/<target> will be testing
+      # src/python/<target>. To change to honest measurements, do target.walk() here instead,
+ # however this results in very useless and noisy coverage reports.
+ source = set(os.path.dirname(source).replace(os.sep, '.') for source in target.sources)
+ args = ['-p', 'pytest_cov',
+ '--cov-config', filename,
+ '--cov-report', 'html',
+ '--cov-report', 'term']
+ for module in source:
+ args.extend(['--cov', module])
+ return args
+
+ @classmethod
+ def wait_on(cls, popen, timeout=TEST_TIMEOUT):
+ total_wait = Amount(0, Time.SECONDS)
+ while total_wait < timeout:
+ rc = popen.poll()
+ if rc is not None:
+ return PythonTestResult.rc(rc)
+ total_wait += cls.TEST_POLL_PERIOD
+ time.sleep(cls.TEST_POLL_PERIOD.as_(Time.SECONDS))
+ popen.kill()
+ return PythonTestResult.timeout()
+
+ def _run_single_test(self, target, interpreter, chroot):
+ clone = chroot.clone()
+ clone.set_entry_point('pytest')
+ clone.freeze()
+
+ args = self.junit_args(target)
+ if self.context.options.python_test_coverage:
+ args.extend(self.coverage_args(target))
+
+ pex = PEX(pex=clone.path(), interpreter=interpreter)
+ sources = [os.path.join(target.target_base, source) for source in target.sources]
+
+ try:
+ po = pex.run(args=args + sources, blocking=False, setsid=True)
+ rv = self.wait_on(po, timeout=target.timeout)
+ except Exception as e:
+ import traceback
+ print('Failed to run test!', file=sys.stderr)
+ traceback.print_exc()
+ rv = PythonTestResult.exception()
+ finally:
+ if po and po.returncode != 0:
+ try:
+ os.killpg(po.pid, signal.SIGTERM)
+ except OSError as e:
+ if e.errno == errno.EPERM:
+ print("Unable to kill process group: %d" % po.pid)
+ elif e.errno != errno.ESRCH:
+ rv = PythonTestResult.exception()
+
+ clone.chroot().delete()
+ return rv
+
+ def output(self, results):
+ for target in sorted(results):
+ for interpreter in sorted(results[target]):
+ rv = results[target][interpreter]
+ outmethod = self.context.log.info if rv.success else self.context.log.warn
+ outmethod(' = %16s %-75s.....%s' % (interpreter.version_string, target, rv))
+
+ def execute(self, _):
+ self.context.lock.release()
+
+ results = defaultdict(dict)
+
+ fail_hard = not self.context.options.python_test_failsoft
+ if self.context.options.python_test_coverage:
+ # Coverage often throws errors despite tests succeeding, so make PANTS_PY_COVERAGE
+ # force FAILSOFT.
+ fail_hard = False
+
+ for root, interpreter, chroot in self.iter_chroots():
+ failed = False
+ for target in root.select(PythonTests):
+ rv = self._run_single_test(target, interpreter, chroot)
+ results[target.id][interpreter] = rv
+ if not rv.success:
+ failed = True
+ if fail_hard:
+ break
+ if failed and fail_hard:
+ break
+
+ self.output(results)
+
+ chroot.freeze()
diff --git a/src/python/twitter/pants/targets/python_egg.py b/src/python/twitter/pants/tasks/python/repl.py
similarity index 53%
rename from src/python/twitter/pants/targets/python_egg.py
rename to src/python/twitter/pants/tasks/python/repl.py
index 5950cb2..c159988 100644
--- a/src/python/twitter/pants/targets/python_egg.py
+++ b/src/python/twitter/pants/tasks/python/repl.py
@@ -1,5 +1,5 @@
# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
+# Copyright 2012 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
@@ -14,30 +14,18 @@
# limitations under the License.
# ==================================================================================================
-__author__ = 'Brian Wickman'
+from twitter.common.dirutil import safe_rmtree
+from twitter.common.python.pex import PEX
-import os
-import glob
-from zipimport import zipimporter
+from .chroot_task import ChrootTask
-from pkg_resources import Distribution, EggMetadata, PathMetadata
-from twitter.pants.targets.python_requirement import PythonRequirement
-
-
-def PythonEgg(egg_glob):
- eggs = glob.glob(egg_glob)
-
- requirements = set()
- for egg in eggs:
- if os.path.isdir(egg):
- metadata = PathMetadata(egg, os.path.join(egg, 'EGG-INFO'))
- else:
- metadata = EggMetadata(zipimporter(egg))
- dist = Distribution.from_filename(egg, metadata=metadata)
- requirements.add(dist.as_requirement())
-
- if len(requirements) > 1:
- raise ValueError('Got multiple egg versions! => %s' % requirements)
-
- return PythonRequirement(str(requirements.pop()))
+class PythonRepl(ChrootTask):
+ def execute(self, _):
+ self.context.lock.release()
+ chroot, interpreter = self.select_chroot()
+ chroot = chroot.clone()
+ chroot.freeze()
+ pex = PEX(pex=chroot.path(), interpreter=interpreter)
+ pex.run()
+ chroot.chroot().delete()
diff --git a/src/python/twitter/pants/tasks/python/resolve.py b/src/python/twitter/pants/tasks/python/resolve.py
new file mode 100644
index 0000000..cc8b9e1
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/resolve.py
@@ -0,0 +1,117 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from collections import defaultdict
+
+from twitter.common.collections import OrderedSet
+from twitter.common.python.fetcher import PyPIFetcher, Fetcher
+from twitter.common.python.http import Crawler
+from twitter.common.python.obtainer import Obtainer
+from twitter.common.python.platforms import Platform
+from twitter.common.python.resolver import ResolverBase
+from twitter.common.python.translator import (
+ ChainedTranslator,
+ EggTranslator,
+ SourceTranslator)
+
+from twitter.pants.base import ParseContext
+from twitter.pants.targets import PythonRequirement
+from twitter.pants.tasks import Task
+
+from .target import is_python_root
+
+
+def get_platforms(platform_list):
+ def translate(platform):
+ return Platform.current() if platform == 'current' else platform
+ return tuple(map(translate, platform_list))
+
+
+class Resolver(ResolverBase):
+ """
+ A multi-platform PythonRequirement resolver for Pants.
+ """
+ @classmethod
+ def fetchers(cls, config):
+ fetchers = []
+ fetchers.append(Fetcher([config.get('python-setup', 'install_cache')]))
+ fetchers.extend(Fetcher([url]) for url in config.getlist('python-repos', 'repos', []))
+ fetchers.extend(PyPIFetcher(url) for url in config.getlist('python-repos', 'indices', []))
+ return fetchers
+
+ @classmethod
+ def crawler(cls, config):
+ return Crawler(cache=config.get('python-setup', 'download_cache'))
+
+ def __init__(self, config, platforms=None):
+ self._platforms = platforms or get_platforms(
+ config.getlist('python-setup', 'platforms', ['current']))
+ self._install_cache = config.get('python-setup', 'install_cache')
+ self._crawler = self.crawler(config)
+ self._fetchers = self.fetchers(config)
+ super(Resolver, self).__init__(cache=self._install_cache)
+
+ def make_installer(self, reqs, interpreter, platform):
+ assert len(reqs) == 1 and isinstance(reqs[0], PythonRequirement), (
+ 'Got requirement list: %s' % repr(reqs))
+ req = reqs[0]
+ fetchers = [Fetcher([req.repository])] if req.repository else self._fetchers
+ translator = ChainedTranslator(
+ EggTranslator(install_cache=self._install_cache, platform=platform, python=interpreter.python),
+ SourceTranslator(install_cache=self._install_cache, interpreter=interpreter, use_2to3=req.use_2to3))
+ obtainer = Obtainer(self._crawler, fetchers, translator)
+ return obtainer.obtain
+
+ def resolve(self, requirements, interpreter):
+ requirements = list(requirements)
+ distributions = OrderedSet()
+ for platform in self._platforms:
+ for req in requirements:
+ if req.should_build(interpreter.python, platform):
+ distributions.update(super(Resolver, self).resolve(req, interpreter=interpreter,
+ platform=platform))
+ return distributions
+
+
+class PythonResolve(Task):
+ CACHE = defaultdict(dict)
+
+ @classmethod
+ def setup_parser(cls, option_group, args, mkflag):
+ option_group.add_option(mkflag("req"), dest = "python_resolve_additional", default=[],
+ action="append", help="Stitch these requirements onto all resolved chroots.")
+
+ def __init__(self, context):
+ with ParseContext.temp():
+ self._additional_requirements = [
+ PythonRequirement(req) for req in context.options.python_resolve_additional]
+ super(PythonResolve, self).__init__(context)
+
+ def resolve_cached(self, interpreter, requirements):
+ rs = frozenset(requirements)
+ if rs not in self.CACHE[interpreter]:
+ resolver = Resolver(self.context.config)
+ self.CACHE[interpreter][rs] = resolver.resolve(requirements, interpreter)
+ # return a copy
+ return OrderedSet(self.CACHE[interpreter][rs])
+
+ def execute(self, _):
+ for target in self.context.targets(is_python_root):
+ target.dependencies.update(self._additional_requirements)
+ for interpreter in target.interpreters:
+ self.context.log.info('Resolving %s [%s] [%d reqs]' % (
+ target.name, interpreter.version_string, len(target.requirements)))
+ target.distributions[interpreter] = self.resolve_cached(interpreter, target.requirements)
diff --git a/src/python/twitter/pants/tasks/python/run.py b/src/python/twitter/pants/tasks/python/run.py
new file mode 100644
index 0000000..2f22f9b
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/run.py
@@ -0,0 +1,48 @@
+# ==================================================================================================
+# Copyright 2013 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+import os
+
+from twitter.common.python.pex import PEX
+
+from twitter.pants.targets import PythonBinary
+
+from .chroot_task import ChrootTask
+
+
+class PythonRun(ChrootTask):
+ @classmethod
+ def setup_parser(cls, option_group, args, mkflag):
+ option_group.add_option(mkflag("args"), dest="python_run_args", default=None,
+ help="Run python target with these extra command line parameters.")
+
+ def get_binary_entry_point(self, target):
+ if target.entry_point:
+ return target.entry_point
+ binary_source = target.sources[0]
+ return os.path.splitext(binary_source)[0].replace(os.sep, '.')
+
+ def execute(self, _):
+ self.context.lock.release()
+ chroot, interpreter = self.select_chroot()
+ binary = self.select_binary()
+ chroot = chroot.clone()
+ chroot.set_entry_point(self.get_binary_entry_point(binary))
+ chroot.freeze()
+ pex = PEX(pex=chroot.path(), interpreter=interpreter)
+ pex.run(args=[self.context.options.python_run_args]
+ if self.context.options.python_run_args else ())
+ chroot.chroot().delete()
diff --git a/src/python/twitter/pants/tasks/python/setup.py b/src/python/twitter/pants/tasks/python/setup.py
index af0fde5..9bf4097 100644
--- a/src/python/twitter/pants/tasks/python/setup.py
+++ b/src/python/twitter/pants/tasks/python/setup.py
@@ -16,147 +16,170 @@
__author__ = 'Brian Wickman'
-import errno
+from functools import reduce
import os
-import subprocess
-import tarfile
-from twitter.pants import get_buildroot
-from twitter.common.dirutil import safe_mkdir, safe_rmtree
-from twitter.common.lang import Compatibility
-from twitter.common.python.interpreter import PythonInterpreter
+from twitter.common.dirutil import safe_mkdir
+from twitter.common.python.distiller import Distiller
+from twitter.common.python.http.link import SourceLink
+from twitter.common.python.installer import Installer
+from twitter.common.python.interpreter import PythonInterpreter, PythonIdentity
+from twitter.common.python.obtainer import Obtainer
from twitter.pants.tasks import Task, TaskError
+from twitter.pants.python.resolver import MultiResolver
-StringIO = Compatibility.StringIO
-
-if Compatibility.PY3:
- from urllib.request import urlopen
-else:
- from urllib2 import urlopen
-
-def setup_virtualenv_py(context):
- virtualenv_cache = context.config.get('python-setup', 'bootstrap_cache')
- virtualenv_target = context.config.get('python-setup', 'virtualenv_target')
- if not os.path.exists(virtualenv_cache):
- safe_mkdir(virtualenv_cache)
- if os.path.exists(os.path.join(virtualenv_target, 'virtualenv.py')):
- return True
- else:
- safe_mkdir(virtualenv_target)
-
- virtualenv_urls = context.config.getlist('python-setup', 'virtualenv_urls')
- tf = None
- for url in virtualenv_urls:
- try:
- ve_tgz = urlopen(url, timeout=5)
- ve_tgz_fp = StringIO(ve_tgz.read())
- ve_tgz_fp.seek(0)
- tf = tarfile.open(fileobj=ve_tgz_fp, mode='r:gz')
- break
- except Exception as e:
- context.log.warn('Failed to pull virtualenv from %s' % url)
- continue
- if not tf:
- raise TaskError('Could not download virtualenv!')
+from .target import is_python_root
+
+from pkg_resources import Requirement
+
+
+def safe_link(src, dst):
try:
- tf.extractall(path=virtualenv_cache)
- except Exception as e:
- raise TaskError('Could not install virtualenv: %s' % e)
- context.log.info('Extracted %s' % url)
-
-def subprocess_call(cmdline):
- po = subprocess.Popen(cmdline, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout, stderr = po.communicate()
- return (po.returncode, stdout, stderr)
-
-def install_virtualenv(context, interpreter):
- virtualenv_cache = context.config.get('python-setup', 'bootstrap_cache')
- virtualenv_target = context.config.get('python-setup', 'virtualenv_target')
- pip_repos = context.config.getlist('python-repos', 'repos')
- if not os.path.exists(virtualenv_target):
- raise TaskError('Could not find installed virtualenv!')
-
- env_base = context.config.getdefault('pants_pythons')
-
- # setup $PYTHONS/bin/INTERPRETER => interpreter.binary
- env_bin = os.path.join(env_base, 'bin')
- safe_mkdir(env_bin)
- link_target = os.path.join(env_bin, str(interpreter.identity()))
- if os.path.exists(link_target):
- os.unlink(link_target)
- os.symlink(interpreter.binary(), link_target)
-
- # create actual virtualenv that can be used for synthesis of pants pex
- environment_install_path = os.path.join(env_base, str(interpreter.identity()))
- virtualenv_py = os.path.join(virtualenv_target, 'virtualenv.py')
- python_interpreter = interpreter.binary()
-
- if os.path.exists(os.path.join(environment_install_path, 'bin', 'python')) and (
- not context.options.setup_python_force):
- return True
- else:
- safe_rmtree(environment_install_path)
- safe_mkdir(environment_install_path)
-
- cmdline = '%s %s --distribute %s' % (
- python_interpreter,
- virtualenv_py,
- environment_install_path)
- context.log.info('Setting up %s...' % interpreter.identity())
- context.log.debug('Running %s' % cmdline)
-
- rc, stdout, stderr = subprocess_call(cmdline)
- if rc != 0:
- context.log.warn('Failed to install virtualenv: err=%s' % stderr)
- context.log.info('Cleaning up %s' % interpreter.identity())
- safe_rmtree(environment_install_path)
- raise TaskError('Could not install virtualenv for %s' % interpreter.identity())
-
- def install_package(pkg):
- INSTALL_VIRTUALENV_PACKAGE = """
- source %(environment)s/bin/activate
- %(environment)s/bin/pip install --download-cache=%(cache)s \
- %(f_repositories)s --no-index -U %(package)s
- """ % {
- 'environment': environment_install_path,
- 'cache': virtualenv_cache,
- 'f_repositories': ' '.join('-f %s' % repository for repository in pip_repos),
- 'package': pkg
- }
- rc, stdout, stderr = subprocess_call(INSTALL_VIRTUALENV_PACKAGE)
- if rc != 0:
- context.log.warn('Failed to install %s' % pkg)
- context.log.debug('Stdout:\n%s\nStderr:\n%s\n' % (stdout, stderr))
- return rc == 0
-
- for package in context.config.getlist('python-setup', 'bootstrap_packages'):
- context.log.debug('Installing %s into %s' % (package, interpreter.identity()))
- if not install_package(package):
- context.log.warn('Failed to install %s into %s!' % (package, interpreter.identity()))
- return True
+ os.unlink(dst)
+ except OSError:
+ pass
+ os.symlink(src, dst)
class SetupPythonEnvironment(Task):
+ """
+  Establishes the python interpreter(s) for downstream Python tasks e.g. Resolve, Run, PytestRun.
+
+ Populates the product namespace (for typename = 'python'):
+    'interpreters': ordered list of PythonInterpreter objects
+ """
@classmethod
def setup_parser(cls, option_group, args, mkflag):
- option_group.add_option(mkflag("force"), dest="setup_python_force",
+ option_group.add_option(mkflag("force"), dest="python_setup_force",
action="store_true", default=False,
help="Force clean and install.")
option_group.add_option(mkflag("path"), dest="python_setup_paths",
action="append", default=[],
help="Add a path to search for interpreters, by default PATH.")
+ option_group.add_option(mkflag("interpreter"), dest="python_interpreter",
+ default=[], action='append',
+ help="Constrain what Python interpreters to use. Uses Requirement "
+ "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
+ "By default, no constraints are used. Multiple constraints may "
+ "be added. They will be ORed together.")
+ option_group.add_option(mkflag("multi"), dest="python_multi",
+ default=False, action='store_true',
+ help="Allow multiple interpreters to be bound to an upstream chroot.")
+
+ def __init__(self, context):
+ context.products.require('python')
+ super(SetupPythonEnvironment, self).__init__(context)
+ self._interpreter_cache = context.config.get('python-setup', 'interpreter_cache')
+ safe_mkdir(self._interpreter_cache)
+ self._fetchers = MultiResolver.fetchers(context.config)
+ self._crawler = MultiResolver.crawler(context.config)
+ self._interpreters = set()
- def execute(self, _):
- setup_paths = self.context.options.python_setup_paths or os.getenv('PATH').split(':')
- self.context.log.debug('Finding interpreters in %s' % setup_paths)
- interpreters = PythonInterpreter.all(setup_paths)
- self.context.log.debug('Found %d interpreters' % len(interpreters))
- setup_virtualenv_py(self.context)
-
- for interpreter in interpreters:
- self.context.log.debug('Preparing %s' % interpreter)
- try:
- install_virtualenv(self.context, interpreter)
- except TaskError as e:
- print('Failed to install %s, continuing anyway.' % interpreter)
+ @classmethod
+ def interpreter_from_path(cls, path):
+ _, interpreter_dir = os.path.split(path)
+ identity = PythonIdentity.from_path(interpreter_dir)
+ try:
+ executable = os.readlink(os.path.join(path, 'python'))
+ except OSError:
+ # Failed to setup interpreter?
+ return None
+ try:
+ distribute_path = os.readlink(os.path.join(path, 'distribute'))
+ except OSError:
+ distribute_path = None
+ return PythonInterpreter(executable, identity, distribute_path)
+
+ def setup_distribute(self, interpreter, dest):
+ obtainer = Obtainer(self._crawler, self._fetchers, [])
+ links = [link for link in obtainer.iter(Requirement.parse('distribute'))
+ if isinstance(link, SourceLink)]
+ for link in links:
+ self.context.log.debug('Fetching %s' % link)
+ sdist = link.fetch()
+ self.context.log.debug('Installing %s' % sdist)
+ installer = Installer(sdist, strict=False, interpreter=interpreter)
+ dist = installer.distribution()
+ self.context.log.debug('Distilling %s' % dist)
+ egg = Distiller(dist).distill(into=dest)
+ safe_link(egg, os.path.join(dest, 'distribute'))
+ break
+
+ def setup_interpreter(self, interpreter):
+ interpreter_dir = os.path.join(self._interpreter_cache, str(interpreter.identity))
+ safe_mkdir(interpreter_dir)
+ safe_link(interpreter.binary, os.path.join(interpreter_dir, 'python'))
+ if interpreter.distribute:
+ safe_link(interpreter.distribute, os.path.join(interpreter_dir, 'distribute'))
+ else:
+ self.setup_distribute(interpreter, interpreter_dir)
+
+ def setup_cached(self):
+ for interpreter_dir in os.listdir(self._interpreter_cache):
+ path = os.path.join(self._interpreter_cache, interpreter_dir)
+ pi = self.interpreter_from_path(path)
+ if pi:
+ self.context.log.debug('Found interpreter %s: %s (cached)' % (pi.binary, str(pi.identity)))
+ self._interpreters.add(pi)
+
+ def setup_paths(self, paths):
+ for interpreter in PythonInterpreter.all(paths):
+ identity_str = str(interpreter.identity)
+ path = os.path.join(self._interpreter_cache, identity_str)
+ pi = self.interpreter_from_path(path)
+ if pi is None or pi.distribute is None:
+ self.context.log.debug('Found interpreter %s: %s (%s)' % (
+ interpreter.binary,
+ str(interpreter.identity),
+ 'uncached' if pi is None else 'incomplete'))
+ self.setup_interpreter(interpreter)
+ pi = self.interpreter_from_path(path)
+ if pi is None or pi.distribute is None:
+ continue
+ self._interpreters.add(pi)
+
+ def matches(self, filters):
+ for interpreter in self._interpreters:
+ if any(interpreter.identity.matches(filt) for filt in filters):
+ yield interpreter
+
+ def interpreters(self):
+ has_setup = False
+ setup_paths = self.context.options.python_setup_paths or os.getenv('PATH').split(os.pathsep)
+ self.setup_cached()
+ if self.context.options.python_setup_force:
+ has_setup = True
+ self.setup_paths(setup_paths)
+ filters = self.context.options.python_interpreter or ['']
+ matches = list(self.matches(filters))
+ if len(matches) == 0 and not has_setup:
+ self.setup_paths(setup_paths)
+ matches = list(self.matches(filters))
+ if len(matches) == 0:
+ self.context.log.warn('Found no valid interpreters!')
+ return matches
+
+ def select_interpreter(self, compatibilities):
+ if self.context.options.python_multi:
+ return compatibilities
+ me = PythonInterpreter.get()
+ if me in compatibilities:
+ return [me]
+ return [min(compatibilities)] if compatibilities else []
+ def execute(self, _):
+ all_interpreters = set(self.interpreters())
+ for target in self.context.targets(is_python_root):
+ self.context.log.info('Setting up interpreters for %s' % target)
+ closure = target.closure()
+ self.context.log.debug(' - Target closure: %d targets' % len(closure))
+ target_compatibilities = [set(self.matches(getattr(closure_target, 'compatibility', [''])))
+ for closure_target in closure]
+ target_compatibilities = reduce(set.intersection, target_compatibilities, all_interpreters)
+ self.context.log.debug(' - Target minimum compatibility: %s' % (
+ ' '.join(interp.version_string for interp in target_compatibilities)))
+ interpreters = self.select_interpreter(target_compatibilities)
+ self.context.log.debug(' - Selected: %s' % interpreters)
+ if not interpreters:
+ raise TaskError('No compatible interpreters for %s' % target)
+ target.interpreters = interpreters
diff --git a/src/python/twitter/pants/tasks/python/setup_py.py b/src/python/twitter/pants/tasks/python/setup_py.py
new file mode 100644
index 0000000..397aec5
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/setup_py.py
@@ -0,0 +1,200 @@
+# ==================================================================================================
+# Copyright 2013 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from __future__ import print_function
+
+from collections import defaultdict
+import os
+import pprint
+import subprocess
+import sys
+
+from twitter.common.collections import OrderedSet
+from twitter.common.contextutil import pushd
+from twitter.common.dirutil import safe_rmtree
+from twitter.common.dirutil.chroot import Chroot
+
+from twitter.pants.targets import (
+ PythonLibrary,
+ PythonRequirement,
+ PythonTarget,
+ PythonThriftLibrary)
+from twitter.pants.tasks import Task, TaskError
+
+
+SETUP_BOILERPLATE = """
+# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS
+# Target: %(setup_target)s
+
+from setuptools import setup
+
+setup(**
+%(setup_dict)s
+)
+"""
+
+
+class PythonSetupPy(Task):
+ """Generate setup.py-based Python projects from python_library targets."""
+ PACKAGE_ROOT = 'src'
+
+ @classmethod
+ def setup_parser(cls, option_group, args, mkflag):
+ option_group.add_option(mkflag("args"), dest="python_setup_py_args",
+ default=None,
+ help="[%default] The setup.py command to run against the artifact. "
+ "By default 'sdist' will be run and the source distribution dumped "
+ "into the pants dist directory.")
+
+ @classmethod
+ def minified_dependencies(cls, target):
+ depmap = defaultdict(OrderedSet)
+ providers = []
+
+ def resolve(trg):
+ if isinstance(trg, PythonTarget) and trg.provides:
+ providers.append(trg.provides.key)
+ for dependency in getattr(trg, 'dependencies', []):
+ for prv in providers:
+ for dep in dependency.resolve():
+ depmap[prv].add(dep)
+ resolve(dep)
+ if isinstance(trg, PythonTarget) and trg.provides:
+ assert providers[-1] == trg.provides.key
+ providers.pop()
+
+ resolve(target)
+
+ import pprint
+ pprint.pprint(depmap, indent=4)
+
+ root_deps = depmap.pop(target.provides.key)
+ def elide(trg):
+ if isinstance(trg, PythonTarget) and trg.provides:
+ return
+ if any(trg in depset for depset in depmap.values()):
+ root_deps.discard(trg)
+
+ target.walk(elide)
+ return root_deps
+
+ @classmethod
+ def write_sources(cls, chroot, target, dependencies):
+ resources = defaultdict(set)
+ def write(trg, src):
+ chroot.link(os.path.join(trg.target_base, src), os.path.join(cls.PACKAGE_ROOT, src))
+ def write_resource(trg, resource):
+ base, fn = os.path.split(resource)
+ resources[base.replace(os.sep, '.')].add(fn)
+ chroot.link(os.path.join(trg.target_base, resource), os.path.join(cls.PACKAGE_ROOT, resource))
+ # TODO(wickman) This is going to need revisiting once resources become first-class citizens.
+ for source in list(target.sources):
+ write(target, source)
+ for resource in list(target.resources):
+ write_resource(target, resource)
+ for target in dependencies:
+ if isinstance(target, PythonLibrary) and not target.provides:
+ for source in list(target.sources):
+ write(target, source)
+ for resource in list(target.resources):
+ write_resource(target, resource)
+ elif isinstance(target, PythonThriftLibrary):
+ print('WARNING: Thrift libraries not yet supported: %s' % target, file=sys.stderr)
+ return dict((pkg_name, list(r)) for (pkg_name, r) in resources.items())
+
+ @classmethod
+ def establish_namespaces(cls, chroot, namespace_packages):
+ base = os.path.join(chroot.path(), cls.PACKAGE_ROOT)
+
+ packages = set()
+
+ for root, dirs, files in os.walk(base):
+ rel_root = os.path.relpath(root, base)
+ if rel_root == '.':
+ continue
+ package_name = rel_root.replace(os.path.sep, '.')
+ packages.add(package_name)
+ if '__init__.py' not in files:
+ with open(os.path.join(root, '__init__.py'), 'w') as fp:
+ fp.write("__import__('pkg_resources').declare_namespace(__name__)"
+ if package_name in namespace_packages else "")
+
+ return packages, namespace_packages
+
+ @classmethod
+ def write_setup(cls, chroot, target):
+ dependencies = cls.minified_dependencies(target)
+ resources = cls.write_sources(chroot, target, dependencies)
+ setup_keywords = target.provides._kw
+ install_requires = set()
+ package_dir = {'': cls.PACKAGE_ROOT}
+ packages, _ = cls.establish_namespaces(chroot, setup_keywords.get('namespace_packages', {}))
+ for dep in dependencies:
+ if isinstance(dep, PythonRequirement):
+ install_requires.add(str(dep._requirement))
+ elif isinstance(dep, PythonTarget) and dep.provides:
+ install_requires.add(dep.provides.key)
+
+ setup_keywords.update(
+ install_requires = list(install_requires),
+ package_dir = package_dir,
+ packages = list(sorted(packages)),
+ package_data = resources)
+
+ chroot.write(SETUP_BOILERPLATE % {
+ 'setup_dict': pprint.pformat(setup_keywords, indent=4),
+ 'setup_target': repr(target)
+ }, 'setup.py')
+
+ def process_target(self, target):
+ distdir = self.context.config.getdefault('pants_distdir')
+ setup_dir = os.path.join(distdir, '%s-%s' % (
+ target.provides._name, target.provides._version))
+ chroot = Chroot(distdir, name=target.provides._name)
+
+ self.write_setup(chroot, target)
+ safe_rmtree(setup_dir)
+ os.rename(chroot.path(), setup_dir)
+
+ arguments = 'sdist --dist-dir=%s ' % distdir
+ arguments += self.context.options.python_setup_py_args or ''
+
+ # TODO(wickman) This goes somewhat counter to run/repl commands in that it is interpreter
+ # agnostic. This is fine for the default (sdist) command but for bdist commands, you really
+ # want to be dependent upon the root=>chroot=>setup to get the desired effect. Figure out
+ # if we want to have a second type of synthetic root or just paste this onto the existing
+ # PythonRoot concept.
+ with pushd(setup_dir):
+ cmd = '%s setup.py %s' % (sys.executable, arguments)
+ self.context.log.info('Running "%s" in %s' % (cmd, setup_dir))
+ po = subprocess.Popen(cmd, shell=True)
+ po.wait()
+
+ if po.returncode != 0:
+ raise TaskError('Failed to run %s!' % cmd)
+
+ expected_tgz = '%s-%s.tar.gz' % (target.provides._name, target.provides._version)
+ dist_tgz = os.path.join(distdir, expected_tgz)
+ if not os.path.exists(dist_tgz):
+ self.context.log.warn('Could not find expected artifact %s!' % dist_tgz)
+ else:
+ self.context.log.warn('Wrote %s' % dist_tgz)
+ safe_rmtree(setup_dir)
+
+ def execute(self, _):
+ for target in self.context.target_roots:
+ if isinstance(target, PythonTarget) and target.provides:
+ self.process_target(target)
diff --git a/src/python/twitter/pants/tasks/python/target.py b/src/python/twitter/pants/tasks/python/target.py
new file mode 100644
index 0000000..f3553c3
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/target.py
@@ -0,0 +1,66 @@
+# ==================================================================================================
+# Copyright 2013 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from twitter.common.collections import OrderedSet
+
+from twitter.pants.base import ParseContext, Target
+from twitter.pants.base.target import TargetDefinitionException
+from twitter.pants.targets import PythonRequirement
+
+
+def is_python_root(target):
+ return isinstance(target, PythonRoot)
+
+
+class PythonRoot(Target):
+ """
+ Internal target for managing python chroot state.
+ """
+ @classmethod
+ def synthetic_name(cls, targets):
+ return list(targets)[0].name if len(targets) > 0 else 'empty'
+
+ @classmethod
+ def union(cls, targets, name=None):
+ name = name or (cls.synthetic_name(targets) + '-union')
+ with ParseContext.temp():
+ return cls(name, dependencies=targets)
+
+ @classmethod
+ def of(cls, target):
+ with ParseContext.temp():
+ return cls(target.name, dependencies=[target])
+
+ def __init__(self, name, dependencies=None):
+ self.dependencies = OrderedSet(dependencies) if dependencies else OrderedSet()
+ self.internal_dependencies = OrderedSet()
+ self.interpreters = []
+ self.distributions = {} # interpreter => distributions
+ self.chroots = {} # interpreter => chroots
+ super(PythonRoot, self).__init__(name)
+
+ def closure(self):
+ os = OrderedSet()
+ for target in self.dependencies | self.internal_dependencies:
+ os.update(target.closure())
+ return os
+
+ def select(self, target_class):
+ return OrderedSet(target for target in self.closure() if isinstance(target, target_class))
+
+ @property
+ def requirements(self):
+ return self.select(PythonRequirement)
diff --git a/src/python/twitter/pants/tasks/python/thrift_gen.py b/src/python/twitter/pants/tasks/python/thrift_gen.py
new file mode 100644
index 0000000..adf971b
--- /dev/null
+++ b/src/python/twitter/pants/tasks/python/thrift_gen.py
@@ -0,0 +1,137 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+import os
+
+from twitter.pants import is_python, thrift_util
+from twitter.pants.base import ParseContext
+from twitter.pants.base.build_cache import BuildCache
+from twitter.pants.python.code_generator import CodeGenerator
+from twitter.pants.targets import PythonLibrary, PythonRequirement, PythonThriftLibrary
+from twitter.pants.tasks.thrift_gen_base import ThriftLanguageBase
+
+
+class PythonThriftGen(ThriftLanguageBase):
+ def __init__(self, context):
+ super(PythonThriftGen, self).__init__(context)
+ gen_info = context.config.getdict('thrift-gen', 'python')
+ self._cache = BuildCache(context.config.get('python-setup', 'sdist_cache'))
+ self._gen = gen_info['gen']
+ with ParseContext.temp():
+ self._thrift = PythonRequirement('thrift==%s' % gen_info['egg_version'], use_2to3=True)
+
+ @property
+ def gen(self):
+ return self._gen
+
+ def can_build(self, target):
+ return isinstance(target, PythonThriftLibrary)
+
+ @property
+ def target_predicate(self):
+ return is_python
+
+ def _target_source(self, namespace, types):
+ ns_components = namespace.split('.')
+ for k in range(1, len(ns_components)):
+ yield '/'.join(ns_components[:k]) + '/__init__.py'
+ basepath = namespace.replace('.', '/')
+ def path(name):
+ return os.path.join(basepath, '%s.py' % name)
+ yield path('__init__')
+ if 'const' in types:
+ yield path('constants')
+ if set(['enum', 'exception', 'struct', 'union']) & set(types.keys()):
+ yield path('ttypes')
+ for service in types['service']:
+ yield path(service)
+ yield os.path.join(basepath, '%s-remote' % service)
+
+ def target_sources(self, target):
+ sources = []
+ for source, namespace, types in thrift_util.calculate_entities(target, 'py'):
+ if namespace is None:
+ namespace, _ = os.path.splitext(os.path.basename(source))
+ sources.extend(self._target_source(namespace, types))
+ return sources
+
+ def create_target(self, gen_target, dependees, source_root):
+ library_key = self._cache.key_for_target(gen_target)
+ # TODO(wickman) Make requirement_string a classmethod so that we can avoid
+ # linking the chroot at all.
+ pstr = PythonSourceToDistribution(gen_target,
+ self.target_sources(gen_target),
+ os.path.join(source_root),
+ library_key.hash)
+ if self._cache.needs_update(library_key):
+ sdist = pstr.build()
+ self._cache.update(library_key, build_artifacts=[sdist], artifact_root=os.path.dirname(sdist))
+ with ParseContext.temp():
+ # TODO(wickman) Specifying the internal repository is bound to break things
+ # downstream once we support remote repositories directly from PEX. We'll either
+ # have to separate internal vs explicit repositories or pass along an explicit
+ # repository list for the Resolver to consume separate of the repos specified by
+ # the PythonRequirement.
+ target = PythonRequirement(pstr.requirement_string(), repository=library_key.filename,
+ use_2to3=True)
+
+ # NNN TODO(wickman) This seems hacky?
+ # NNN TODO(wickman) Split CodeGen into two tasks: code_gen + synthesize_codegen_targets?
+ self.context.log.debug('Replacing %r with:' % gen_target)
+ self.context.log.debug(' generated: %r' % target)
+ self.context.log.debug(' library: %r' % self._thrift)
+ self.context.remove_target(gen_target)
+ self.context.add_target(target)
+ self.context.add_target(self._thrift)
+ for dep in dependees:
+ self.context.log.debug(' -= %s' % dep)
+ dep.dependencies.discard(gen_target)
+ dep.dependencies.add(target)
+ dep.dependencies.add(self._thrift)
+
+
+class PythonSourceToDistribution(CodeGenerator):
+ def __init__(self, gen_target, gen_files, gen_root, hash_key):
+ super(PythonSourceToDistribution, self).__init__(gen_target, gen_root, '-' + hash_key[:8])
+ self.link_files(gen_root, gen_files)
+
+ def link_files(self, gen_root, gen_files):
+ for fn in gen_files:
+ source = os.path.join(gen_root, self.package_dir(), fn)
+ dest = os.path.join(self.codegen_root, self.package_dir(), fn)
+ if not os.path.exists(source) and source.endswith('__init__.py'):
+ self.chroot.touch(dest)
+ continue
+ # print('Linking %s => %s' % (source, dest))
+ self.chroot.link(source, dest)
+
+ def package_dir(self):
+ return 'gen-py'
+
+ def generate(self):
+ genpy_root = os.path.join(self.chroot.path(), self.codegen_root, self.package_dir())
+ for dir, _, files in os.walk(os.path.normpath(genpy_root)):
+ reldir = os.path.relpath(dir, genpy_root)
+ if reldir == '.': continue
+ if '__init__.py' not in files: continue
+ init_py_abspath = os.path.join(dir, '__init__.py')
+ module_path = self.path_to_module(reldir)
+ self.created_packages.add(module_path)
+ if len(files) == 1:
+ self.created_namespace_packages.add(module_path)
+ if os.path.getsize(init_py_abspath) == 0:
+ with open(init_py_abspath, 'wb') as f:
+ f.write(b"__import__('pkg_resources').declare_namespace(__name__)")
diff --git a/src/python/twitter/pants/tasks/scala_compile.py b/src/python/twitter/pants/tasks/scala_compile.py
index afad081..a7280f6 100644
--- a/src/python/twitter/pants/tasks/scala_compile.py
+++ b/src/python/twitter/pants/tasks/scala_compile.py
@@ -14,6 +14,7 @@
# limitations under the License.
# ==================================================================================================
+from functools import reduce
import os
import textwrap
@@ -53,13 +54,6 @@ class ScalaCompile(NailgunTask):
self._compile_profile = context.config.get('scala-compile', 'compile-profile')
self._depemitter_profile = context.config.get('scala-compile', 'dependencies-plugin-profile')
- # All scala targets implicitly depend on the selected scala runtime.
- scaladeps = []
- for spec in context.config.getlist('scala-compile', 'scaladeps'):
- scaladeps.extend(context.resolve(spec))
- for target in context.targets(_is_scala):
- target.update_dependencies(scaladeps)
-
workdir = context.config.get('scala-compile', 'workdir')
self._classes_dir = os.path.join(workdir, 'classes')
@@ -74,6 +68,14 @@ class ScalaCompile(NailgunTask):
self._confs = context.config.getlist('scala-compile', 'confs')
self._depfile = os.path.join(workdir, 'dependencies')
+ def prepare(self):
+ # All scala targets implicitly depend on the selected scala runtime.
+ self.context.log.debug('ScalaCompile rewriting upstream dependencies.')
+ scaladeps = []
+ for spec in self.context.config.getlist('scala-compile', 'scaladeps'):
+ scaladeps.extend(self.context.resolve(spec))
+ for target in self.context.targets(_is_scala):
+ target.update_dependencies(scaladeps)
# If we are compiling scala libraries with circular deps on java libraries we need to make sure
# those cycle deps are present.
self._inject_java_cycles()
@@ -84,9 +86,9 @@ class ScalaCompile(NailgunTask):
self.context.add_target(java_target)
def execute(self, targets):
- scala_targets = filter(_is_scala, targets)
+ scala_targets = list(filter(_is_scala, targets))
if scala_targets:
- with self.context.state('classpath', []) as cp:
+ with self.context.state.list('classpath') as cp:
for conf in self._confs:
cp.insert(0, (conf, self._classes_dir))
diff --git a/src/python/twitter/pants/tasks/scala_repl.py b/src/python/twitter/pants/tasks/scala_repl.py
index 4d3827c..a97bee0 100644
--- a/src/python/twitter/pants/tasks/scala_repl.py
+++ b/src/python/twitter/pants/tasks/scala_repl.py
@@ -45,10 +45,14 @@ class ScalaRepl(JvmTask):
self.main = context.config.get('scala-repl', 'main')
def execute(self, targets):
+ jvm_targets = self.context.targets(is_jvm)
+ if len(jvm_targets) == 0:
+ # No JVM targets to repl
+ return
+
# The repl session may last a while, allow concurrent pants activity during this pants idle
# period.
self.context.lock.release()
-
self.save_stty_options()
try:
runjava_indivisible(
diff --git a/src/python/twitter/pants/tasks/thrift_gen.py b/src/python/twitter/pants/tasks/thrift_gen.py
index 1206357..b337aad 100644
--- a/src/python/twitter/pants/tasks/thrift_gen.py
+++ b/src/python/twitter/pants/tasks/thrift_gen.py
@@ -19,26 +19,28 @@ import re
import subprocess
from collections import defaultdict
+
from twitter.common import log
-from twitter.common.collections import OrderedSet
from twitter.common.dirutil import safe_mkdir
-
-from twitter.pants import is_jvm, is_python
+from twitter.pants import thrift_util
from twitter.pants.targets import (
JavaLibrary,
JavaThriftLibrary,
PythonLibrary,
PythonThriftLibrary)
-from twitter.pants.tasks import TaskError
-from twitter.pants.tasks.code_gen import CodeGen
-from twitter.pants.thrift_util import calculate_compile_roots, select_thrift_binary
+
+from . import TaskError
+from .code_gen import CodeGen
+from .thrift_gen_java import JavaThriftGen
+from .python.thrift_gen import PythonThriftGen
+
class ThriftGen(CodeGen):
- class GenInfo(object):
- def __init__(self, gen, deps):
- self.gen = gen
- self.deps = deps
+ DELEGATES = {
+ 'java': JavaThriftGen,
+ 'python': PythonThriftGen,
+ }
@classmethod
def setup_parser(cls, option_group, args, mkflag):
@@ -49,73 +51,57 @@ class ThriftGen(CodeGen):
help="Thrift compiler version.")
option_group.add_option(mkflag("lang"), dest="thrift_gen_langs", default=[],
- action="append", type="choice", choices=['python', 'java'],
- help="Force generation of thrift code for these languages. Both "
- "'python' and 'java' are supported")
+ action="append", type="choice", choices=list(cls.DELEGATES.keys()),
+ help="Force generation of thrift code for these languages. The "
+ "following are supported: %s" % ' '.join(cls.DELEGATES.keys()))
def __init__(self, context):
CodeGen.__init__(self, context)
-
self.output_dir = (
context.options.thrift_gen_create_outdir
or context.config.get('thrift-gen', 'workdir')
)
self.strict = context.config.getbool('thrift-gen', 'strict')
self.verbose = context.config.getbool('thrift-gen', 'verbose')
-
- def create_geninfo(key):
- gen_info = context.config.getdict('thrift-gen', key)
- gen = gen_info['gen']
- deps = {}
- for category, depspecs in gen_info['deps'].items():
- dependencies = OrderedSet()
- deps[category] = dependencies
- for depspec in depspecs:
- dependencies.update(context.resolve(depspec))
- return ThriftGen.GenInfo(gen, deps)
-
- self.gen_java = create_geninfo('java')
- self.gen_python = create_geninfo('python')
-
+ self.delegates = dict((lang, delegate_cls(context))
+ for (lang, delegate_cls) in self.DELEGATES.items())
self.gen_langs = set(context.options.thrift_gen_langs)
- for lang in ('java', 'python'):
+ for lang in self.delegates:
if self.context.products.isrequired(lang):
self.gen_langs.add(lang)
+ def choose_delegate(self, lang):
+ try:
+ return self.delegates[lang]
+ except KeyError:
+ raise TaskError('Unknown thrift generation language: %s' % lang)
def invalidate_for(self):
return self.gen_langs
def is_gentarget(self, target):
- return ((isinstance(target, JavaThriftLibrary) and target.compiler == 'thrift')
- or isinstance(target, PythonThriftLibrary))
+ value = any(delegate.can_build(target) for delegate in self.delegates.values())
+ #self.context.log.debug('is_gentarget(%s) = %s' % (target, value))
+ return value
def is_forced(self, lang):
return lang in self.gen_langs
def genlangs(self):
- return dict(java=is_jvm, python=is_python)
+ return dict((lang, delegate.target_predicate) for (lang, delegate) in self.delegates.items())
def genlang(self, lang, targets):
- thrift_binary = select_thrift_binary(
+ thrift_binary = thrift_util.select_thrift_binary(
self.context.config,
version=self.context.options.thrift_version
)
-
- bases, sources = calculate_compile_roots(targets, self.is_gentarget)
-
- if lang == 'java':
- gen = self.gen_java.gen
- elif lang == 'python':
- gen = self.gen_python.gen
- else:
- raise TaskError('Unrecognized thrift gen lang: %s' % lang)
-
+ bases, sources = thrift_util.calculate_compile_roots(targets, self.is_gentarget)
+ delegate = self.choose_delegate(lang)
safe_mkdir(self.output_dir)
args = [
thrift_binary,
- '--gen', gen,
+ '--gen', delegate.gen,
'-recurse',
'-o', self.output_dir,
]
@@ -134,115 +120,10 @@ class ThriftGen(CodeGen):
log.debug('Executing: %s' % ' '.join(cmd))
processes.append(subprocess.Popen(cmd))
- # TODO(John Sirois): Use map sources to targets and use TargetError to invalidate less thrift
+ # TODO(John Sirois): Use map sources to targets and use TargetError to invalidate fewer thrift
# targets onfailure
if sum(p.wait() for p in processes) != 0:
raise TaskError
- def createtarget(self, lang, gentarget, dependees):
- if lang == 'java':
- return self._create_java_target(gentarget, dependees)
- elif lang == 'python':
- return self._create_python_target(gentarget, dependees)
- else:
- raise TaskError('Unrecognized thrift gen lang: %s' % lang)
-
- def _create_java_target(self, target, dependees):
- def create_target(files, deps):
- return self.context.add_new_target(os.path.join(self.output_dir, 'gen-java'),
- JavaLibrary,
- name=target.id,
- provides=target.provides,
- sources=files,
- dependencies=deps)
- return self._inject_target(target, dependees, self.gen_java, 'java', create_target)
-
- def _create_python_target(self, target, dependees):
- def create_target(files, deps):
- return self.context.add_new_target(os.path.join(self.output_dir, 'gen-py'),
- PythonLibrary,
- name=target.id,
- sources=files,
- dependencies=deps)
- return self._inject_target(target, dependees, self.gen_python, 'py', create_target)
-
- def _inject_target(self, target, dependees, geninfo, namespace, create_target):
- files = []
- has_service = False
- for source in target.sources:
- services, genfiles = calculate_gen(os.path.join(target.target_base, source))
- has_service = has_service or services
- files.extend(genfiles.get(namespace, []))
- deps = geninfo.deps['service' if has_service else 'structs']
- tgt = create_target(files, deps)
- tgt.id = target.id
- tgt.is_codegen = True
- for dependee in dependees:
- dependee.update_dependencies([tgt])
- return tgt
-
-
-NAMESPACE_PARSER = re.compile(r'^\s*namespace\s+([^\s]+)\s+([^\s]+)\s*$')
-TYPE_PARSER = re.compile(r'^\s*(const|enum|exception|service|struct|union)\s+([^\s{]+).*')
-
-
-# TODO(John Sirois): consolidate thrift parsing to 1 pass instead of 2
-def calculate_gen(source):
- """Calculates the service types and files generated for the given thrift IDL source.
-
- Returns a tuple of (service types, generated files).
- """
-
- with open(source, 'r') as thrift:
- lines = thrift.readlines()
- namespaces = {}
- types = defaultdict(set)
- for line in lines:
- match = NAMESPACE_PARSER.match(line)
- if match:
- lang = match.group(1)
- namespace = match.group(2)
- namespaces[lang] = namespace
- else:
- match = TYPE_PARSER.match(line)
- if match:
- typename = match.group(1)
- name = match.group(2)
- types[typename].add(name)
-
- genfiles = defaultdict(set)
-
- namespace = namespaces.get('py')
- if namespace:
- genfiles['py'].update(calculate_python_genfiles(namespace, types))
-
- namespace = namespaces.get('java')
- if namespace:
- genfiles['java'].update(calculate_java_genfiles(namespace, types))
-
- return types['service'], genfiles
-
-
-def calculate_python_genfiles(namespace, types):
- basepath = namespace.replace('.', '/')
- def path(name):
- return os.path.join(basepath, '%s.py' % name)
- yield path('__init__')
- if 'const' in types:
- yield path('constants')
- if set(['enum', 'exception', 'struct', 'union']) & set(types.keys()):
- yield path('ttypes')
- for service in types['service']:
- yield path(service)
- yield os.path.join(basepath, '%s-remote' % service)
-
-
-def calculate_java_genfiles(namespace, types):
- basepath = namespace.replace('.', '/')
- def path(name):
- return os.path.join(basepath, '%s.java' % name)
- if 'const' in types:
- yield path('Constants')
- for typename in ['enum', 'exception', 'service', 'struct', 'union']:
- for name in types[typename]:
- yield path(name)
+ def createtarget(self, lang, target, dependees):
+ return self.choose_delegate(lang).create_target(target, dependees, self.output_dir)
diff --git a/src/python/twitter/pants/tasks/thrift_gen_base.py b/src/python/twitter/pants/tasks/thrift_gen_base.py
new file mode 100644
index 0000000..5fe5e6a
--- /dev/null
+++ b/src/python/twitter/pants/tasks/thrift_gen_base.py
@@ -0,0 +1,58 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+from abc import abstractmethod, abstractproperty
+from twitter.common.collections import OrderedSet
+from twitter.common.lang import AbstractClass
+
+
+# TODO(wickman) This facade should ideally replace ThriftGen and subsume its
+# functionality in order to simplify upstream consumption.
+class ThriftLanguageBase(AbstractClass):
+ """
+ Base class for classes that generate targets from source generated by
+ the standard Thrift IDL code generator.
+ """
+ def __init__(self, context):
+ self.context = context
+
+ @abstractproperty
+ def gen(self):
+ """Returns the generated code type this target requires, e.g. java, py:newstyle."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def can_build(self, target):
+ """Is this the thrift language generator to use for this target?"""
+ raise NotImplementedError
+
+ @abstractproperty
+ def target_predicate(self):
+ """Return a predicate that takes a dependee and can determine if this is the proper
+ language target for the dependee."""
+ raise NotImplementedError
+
+ def resolve(self, deps):
+ dependencies = OrderedSet()
+ for dep in deps:
+ dependencies.update(self.context.resolve(dep))
+ return dependencies
+
+ @abstractmethod
+ def create_target(self, gen_target, dependees, source_root):
+ """Create the new target and update the pants dependency tree accordingly,
+ returning the generated target."""
+ raise NotImplementedError
diff --git a/src/python/twitter/pants/tasks/thrift_gen_java.py b/src/python/twitter/pants/tasks/thrift_gen_java.py
new file mode 100644
index 0000000..f72f0d8
--- /dev/null
+++ b/src/python/twitter/pants/tasks/thrift_gen_java.py
@@ -0,0 +1,73 @@
+# ==================================================================================================
+# Copyright 2012 Twitter, Inc.
+# --------------------------------------------------------------------------------------------------
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this work except in compliance with the License.
+# You may obtain a copy of the License in the LICENSE file, or at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==================================================================================================
+
+import os
+
+from twitter.pants import is_java, thrift_util
+from twitter.pants.targets import JavaLibrary, JavaThriftLibrary
+
+from .thrift_gen_base import ThriftLanguageBase
+
+
+class JavaThriftGen(ThriftLanguageBase):
+ def __init__(self, context):
+ super(JavaThriftGen, self).__init__(context)
+ gen_info = context.config.getdict('thrift-gen', 'java')
+ self._gen = gen_info['gen']
+ self._service_deps = self.resolve(gen_info['deps']['service'])
+ self._struct_deps = self.resolve(gen_info['deps']['structs'])
+
+ @property
+ def gen(self):
+ return self._gen
+
+ def can_build(self, target):
+ return isinstance(target, JavaThriftLibrary) and target.compiler == 'thrift'
+
+ @property
+ def target_predicate(self):
+ return is_java
+
+ def _target_sources(self, namespace, types):
+ basepath = namespace.replace('.', '/')
+ def path(name):
+ return os.path.join(basepath, '%s.java' % name)
+ if 'const' in types:
+ yield path('Constants')
+ for typename in ['enum', 'exception', 'service', 'struct', 'union']:
+ for name in types[typename]:
+ yield path(name)
+
+ def target_geninfo(self, target):
+ sources, services = [], []
+ for _, namespace, types in thrift_util.calculate_entities(target, 'java'):
+ sources.extend(self._target_sources(namespace, types))
+ services.extend(types['service'])
+ return sources, services
+
+ def create_target(self, gen_target, dependees, source_root):
+ sources, services = self.target_geninfo(gen_target)
+ target = self.context.add_new_target(
+ os.path.join(source_root, 'gen-java'),
+ JavaLibrary,
+ name=gen_target.id,
+ provides=gen_target.provides,
+ sources=sources,
+ dependencies=self._service_deps if services else self._struct_deps)
+ target.id, target.is_codegen = gen_target.id, True
+ for dependee in dependees:
+ dependee.update_dependencies([target])
+ return target
diff --git a/src/python/twitter/pants/tasks/thriftstore_dml_gen.py b/src/python/twitter/pants/tasks/thriftstore_dml_gen.py
index 68f3acc..7f8995d 100644
--- a/src/python/twitter/pants/tasks/thriftstore_dml_gen.py
+++ b/src/python/twitter/pants/tasks/thriftstore_dml_gen.py
@@ -131,7 +131,7 @@ class ThriftstoreDMLGen(CodeGen):
output, error = p.communicate()
if p.wait() != 0:
raise TaskError
- thriftstore_classes = filter(lambda s: s.strip() != '', output.split('\n'))
+ thriftstore_classes = list(filter(lambda s: s.strip() != '', output.splitlines()))
return thriftstore_classes
def _create_java_target(self, target):
diff --git a/src/python/twitter/pants/thrift_util.py b/src/python/twitter/pants/thrift_util.py
index 1cbb951..28891ea 100644
--- a/src/python/twitter/pants/thrift_util.py
+++ b/src/python/twitter/pants/thrift_util.py
@@ -1,3 +1,12 @@
+__all__ = (
+ 'find_includes',
+ 'find_root_thrifts',
+ 'calculate_compile_sources',
+ 'calculate_compile_roots',
+ 'calculate_entities'
+)
+
+from collections import defaultdict
import os
import re
@@ -81,6 +90,40 @@ def calculate_compile_roots(targets, is_thrift_target):
return basedirs, sources
+NAMESPACE_PARSER = re.compile(r'^\s*namespace\s+([^\s]+)\s+([^\s]+)\s*$')
+TYPE_PARSER = re.compile(r'^\s*(const|enum|exception|service|struct|union)\s+([^\s{]+).*')
+
+
+def _calculate_entities(source):
+ with open(source, 'r') as thrift:
+ lines = thrift.readlines()
+ namespaces = {}
+ types = defaultdict(set)
+ for line in lines:
+ match = NAMESPACE_PARSER.match(line)
+ if match:
+ lang = match.group(1)
+ namespace = match.group(2)
+ namespaces[lang] = namespace
+ else:
+ match = TYPE_PARSER.match(line)
+ if match:
+ typename = match.group(1)
+ name = match.group(2)
+ types[typename].add(name)
+ return namespaces, types
+
+
+def calculate_entities(target, language):
+ """Calculates the namespaces and declared types for each thrift IDL source of a target.
+
+ Returns a generator of (source, namespace, types) tuples; namespace may be None.
+ """
+ for source in target.sources:
+ ns, types = _calculate_entities(os.path.join(target.target_base, source))
+ yield (source, ns.get(language), types)
+
+
def select_thrift_binary(config, version=None):
"""Selects a thrift compiler binary matching the current os and architecture.
diff --git a/src/python/twitter/storage/generators/jdbc_code_generator.py b/src/python/twitter/storage/generators/jdbc_code_generator.py
index b9e0667..9230535 100644
--- a/src/python/twitter/storage/generators/jdbc_code_generator.py
+++ b/src/python/twitter/storage/generators/jdbc_code_generator.py
@@ -32,7 +32,7 @@ class JDBCCodeGenerator(CodeGenerator):
def gen_index_enum_values(self, table):
index_names = self.index_enum_names(table)
- index_values = map(lambda (index, name): '%s(%d)' % (name, index), enumerate(index_names))
+ index_values = map(lambda index_name: '%s(%d)' % (index_name[1], index_name[0]), enumerate(index_names))
return ',\n '.join(index_values) + ';'
def gen_set_record_field(self, col, index):
diff --git a/src/python/twitter/storage/generators/thrift_interface_generator.py b/src/python/twitter/storage/generators/thrift_interface_generator.py
index 755166a..1212332 100644
--- a/src/python/twitter/storage/generators/thrift_interface_generator.py
+++ b/src/python/twitter/storage/generators/thrift_interface_generator.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
import os
import pkgutil
import re
@@ -45,7 +47,7 @@ class ThriftInterfaceGenerator(CodeGenerator):
def service_namespaces(self):
if self.schema.thriftNamespaces == None:
- print 'WARNING: no thrift namespaces defined for database %s' % self.schema.name
+ print('WARNING: no thrift namespaces defined for database %s' % self.schema.name)
return ''
else:
return '\n'.join(['namespace %s %s' % (lang, namespace) for lang, namespace in self.schema.thriftNamespaces.items()])
@@ -216,5 +218,5 @@ class ThriftInterfaceGenerator(CodeGenerator):
for table in self.schema.tablesByName.values():
methods += self._generate_service_methods(table)
if len(methods) == 0:
- print "WARNING: no methods generated for service. Did you specify the correct thriftstore database name?"
+ print("WARNING: no methods generated for service. Did you specify the correct thriftstore database name?")
return '\n'.join(methods)
diff --git a/src/python/twitter/storage/management/dispatcher.py b/src/python/twitter/storage/management/dispatcher.py
index fa98914..fbf0635c 100644
--- a/src/python/twitter/storage/management/dispatcher.py
+++ b/src/python/twitter/storage/management/dispatcher.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
import sys
import traceback
@@ -22,7 +24,7 @@ class Dispatcher(object):
try:
cmd = self.cmds[cmd_name](self.args)
cmd.run()
- except CommandError, e:
+ except CommandError as e:
self._on_cmd_error(e, e.show_usage)
def _load_commands(self, commands):
@@ -42,7 +44,7 @@ class Dispatcher(object):
group.sort()
usage += '\n\n\t%s:' % CommandGroups.GROUP_NAMES[group_id]
usage += ''.join(''.join(map(lambda cmd: "\n\t\t%s: %s" % (self.cmds[cmd].name, self.cmds[cmd].__doc__), group)))
- print "%s\n" % usage
+ print("%s\n" % usage)
def _usage_and_exit(self, parser=None):
if parser == None:
@@ -52,13 +54,13 @@ class Dispatcher(object):
sys.exit(-1)
def _error_and_exit(self, msg, parser=None):
- print >> sys.stderr, 'ERROR: ' + msg
+ print('ERROR: ' + msg, file=sys.stderr)
self._usage_and_exit(parser)
def _on_cmd_error(self, exc, show_usage):
- print >> sys.stderr, 'ERROR: ' + str(exc)
+ print('ERROR: ' + str(exc), file=sys.stderr)
if exc.cause_traceback != None:
- print >> sys.stderr, exc.cause_traceback
+ print(exc.cause_traceback, file=sys.stderr)
if show_usage:
self._usage_and_exit(exc.parser)
else:
diff --git a/src/python/twitter/thrift/descriptors/BUILD b/src/python/twitter/thrift/descriptors/BUILD
index ddedc22..53c3db3 100644
--- a/src/python/twitter/thrift/descriptors/BUILD
+++ b/src/python/twitter/thrift/descriptors/BUILD
@@ -17,7 +17,7 @@
python_library(
name = 'descriptors',
dependencies = [
- python_requirement('antlr_python_runtime==3.1.3'),
+ pants('3rdparty/python:antlr-3.1.3'),
pants('src/antlr/twitter/thrift/descriptors'),
pants('src/python/twitter/thrift/text'),
pants('src/thrift/com/twitter/thrift/descriptors:descriptors-py'),
diff --git a/src/python/twitter/thrift/text/thrift_json_decoder.py b/src/python/twitter/thrift/text/thrift_json_decoder.py
index 9e902c4..59144f3 100644
--- a/src/python/twitter/thrift/text/thrift_json_decoder.py
+++ b/src/python/twitter/thrift/text/thrift_json_decoder.py
@@ -72,7 +72,7 @@ class ThriftJSONDecoder(json.JSONDecoder):
elif ttype == TType.BOOL:
ret = not not val
else:
- raise Exception, 'Unrecognized thrift field type: %d' % ttype
+ raise Exception('Unrecognized thrift field type: %d' % ttype)
return ret
def json_to_thrift(json_str, root_thrift_class):
diff --git a/src/thrift/com/twitter/service/BUILD b/src/thrift/com/twitter/service/BUILD
index 1b3d80d..9ec3acb 100644
--- a/src/thrift/com/twitter/service/BUILD
+++ b/src/thrift/com/twitter/service/BUILD
@@ -6,9 +6,15 @@ import sys
import re
import shutil
import tempfile
-import urllib2
from twitter.common.dirutil import Fileset
+from twitter.common.lang import Compatibility
+
+if Compatibility.PY3:
+ import urllib.request as urllib_request
+else:
+ import urllib2 as urllib_request
+
class RemotePythonThrift(object):
# TODO(wickman) Use the antlr thrift parser to just walk the thrift AST
@@ -30,7 +36,7 @@ class RemotePythonThrift(object):
fetch_path = self._base + '/' + source_file
print('Fetching %s' % fetch_path, file=sys.stderr)
target_file = os.path.join(self._staging_dir, source_file)
- url = urllib2.urlopen(fetch_path)
+ url = urllib_request.urlopen(fetch_path)
with open(target_file, 'wb') as fp:
fp.write(self.prefilter(url.read(), namespace=namespace))
yield target_file
diff --git a/src/thrift/com/twitter/tweetypie/service/BUILD b/src/thrift/com/twitter/tweetypie/service/BUILD
index 4adfa50..8de6571 100644
--- a/src/thrift/com/twitter/tweetypie/service/BUILD
+++ b/src/thrift/com/twitter/tweetypie/service/BUILD
@@ -136,7 +136,7 @@ class TweetyPieThrift(object):
# TODO(John Sirois): kill the PANTS_NEW logic when pants.new is rolled out
if not PANTS_NEW:
def printit(msg):
- print msg
+ print(msg)
self.extract(log=printit)
fullpaths = [os.path.join(self._here, relpath) for relpath in relpaths]
diff --git a/tests/python/twitter/pants/base/test_build_cache.py b/tests/python/twitter/pants/base/test_build_cache.py
index 103500b..9838756 100644
--- a/tests/python/twitter/pants/base/test_build_cache.py
+++ b/tests/python/twitter/pants/base/test_build_cache.py
@@ -14,20 +14,23 @@
# limitations under the License.
# ==================================================================================================
-from twitter.pants.base.build_cache import BuildCache
-from twitter.common.contextutil import temporary_dir
+
from contextlib import contextmanager
-import os
import hashlib
+import os
import shutil
import tempfile
+from twitter.pants.base.build_cache import BuildCache
+from twitter.common.contextutil import temporary_dir
+from twitter.common.lang import Compatibility
-TEST_CONTENT = 'muppet'
+TEST_CONTENT = b'muppet'
def expected_hash(tf):
- return hashlib.sha1(os.path.basename(tf.name) + TEST_CONTENT).hexdigest()
+ content = Compatibility.to_bytes(os.path.basename(tf.name)) + TEST_CONTENT
+ return hashlib.sha1(content).hexdigest()
@contextmanager
@@ -58,7 +61,7 @@ def test_needs_update_after_change():
cache.update(key, [f.name])
assert not cache.needs_update(key)
f.truncate()
- f.write('elmo')
+ f.write(b'elmo')
f.flush()
key = cache.key_for('test', [f.name])
assert cache.needs_update(key)
@@ -76,5 +79,5 @@ def test_use_cache():
assert not os.path.exists(abs_fn)
cache.use_cached_files(key, lambda s, d: shutil.copyfile(s, os.path.join(staging, d)))
assert os.path.exists(abs_fn)
- with open(abs_fn) as fd:
+ with open(abs_fn, 'rb') as fd:
assert fd.read() == TEST_CONTENT
diff --git a/tests/python/twitter/pants/java/test_open_jar.py b/tests/python/twitter/pants/java/test_open_jar.py
index 833e757..0cc18f5 100644
--- a/tests/python/twitter/pants/java/test_open_jar.py
+++ b/tests/python/twitter/pants/java/test_open_jar.py
@@ -28,8 +28,8 @@ from twitter.common.lang import Compatibility
from twitter.pants.java import open_jar
-class OpenJarTest(unittest.TestCase):
+class OpenJarTest(unittest.TestCase):
@contextmanager
def jarfile(self):
with temporary_file() as fd:
@@ -76,14 +76,14 @@ class OpenJarTest(unittest.TestCase):
dir = os.path.join(chroot, 'a/b/c')
safe_mkdir(dir)
data_file = os.path.join(dir, 'd.txt')
- with open(data_file, 'w') as fd:
- fd.write('e')
+ with open(data_file, 'wb') as fd:
+ fd.write(b'e')
with self.jarfile() as jarfile:
with open_jar(jarfile, 'w') as jar:
jar.write(data_file, 'f/g/h')
with open_jar(jarfile) as jar:
self.assertEquals(['f/', 'f/g/', 'f/g/h'], jar.namelist())
- self.assertEquals('e', jar.read('f/g/h'))
+ self.assertEquals(b'e', jar.read('f/g/h'))
def test_writestr(self):
def assert_writestr(path, contents, *entries):
@@ -94,5 +94,5 @@ class OpenJarTest(unittest.TestCase):
self.assertEquals(list(entries), jar.namelist())
self.assertEquals(contents, jar.read(path))
- assert_writestr('a.txt', 'b', 'a.txt')
- assert_writestr('a/b/c.txt', 'd', 'a/', 'a/b/', 'a/b/c.txt')
+ assert_writestr('a.txt', b'b', 'a.txt')
+ assert_writestr('a/b/c.txt', b'd', 'a/', 'a/b/', 'a/b/c.txt')
diff --git a/tests/python/twitter/pants/python/test_python_chroot.py b/tests/python/twitter/pants/python/test_python_chroot.py
index f629750..08766b1 100644
--- a/tests/python/twitter/pants/python/test_python_chroot.py
+++ b/tests/python/twitter/pants/python/test_python_chroot.py
@@ -20,7 +20,8 @@ from twitter.common.contextutil import temporary_file
from twitter.common.python.platforms import Platform
from twitter.pants.base import Config
-from twitter.pants.python.python_chroot import get_platforms
+from twitter.pants.python.resolver import get_platforms
+
class PythonChrootTest(unittest.TestCase):
def setUp(self):
diff --git a/tests/python/twitter/pants/scm/test_git.py b/tests/python/twitter/pants/scm/test_git.py
index 83e0dd4..e5d9a17 100644
--- a/tests/python/twitter/pants/scm/test_git.py
+++ b/tests/python/twitter/pants/scm/test_git.py
@@ -18,7 +18,10 @@ import os
import subprocess
import unittest
-from itertools import izip_longest
+try:
+ from itertools import izip_longest as zip_longest
+except ImportError:
+ from itertools import zip_longest
import pytest
@@ -32,13 +35,19 @@ class Version(object):
def __init__(self, text):
self._components = map(int, text.split('.'))
- def __cmp__(self, other):
- for ours, theirs in izip_longest(self._components, other._components, fillvalue=0):
- difference = cmp(ours, theirs)
+ def _compare(self, other):
+ for ours, theirs in zip_longest(self._components, other._components, fillvalue=0):
+ difference = (ours > theirs) - (ours < theirs)
if difference != 0:
return difference
return 0
+ def __lt__(self, other):
+ return self._compare(other) < 0
+
+ def __eq__(self, other):
+ return self._compare(other) == 0
+
class VersionTest(unittest.TestCase):
def test_equal(self):
@@ -64,7 +73,7 @@ def git_version():
process = subprocess.Popen(['git', '--version'], stdout=subprocess.PIPE)
(stdout, stderr) = process.communicate()
assert process.returncode == 0, "Failed to determine git version."
- return Version(stdout.split(' ').pop())
+ return Version(stdout.decode('utf8').split(' ').pop())
@pytest.mark.skipif("git_version() < Version('1.7.10')")
diff --git a/tests/python/twitter/pants/tasks/__init__.py b/tests/python/twitter/pants/tasks/__init__.py
index 96bf740..e01aacf 100644
--- a/tests/python/twitter/pants/tasks/__init__.py
+++ b/tests/python/twitter/pants/tasks/__init__.py
@@ -17,10 +17,8 @@
from contextlib import closing
from optparse import OptionGroup, OptionParser
from StringIO import StringIO
-import pytest
from twitter.common.contextutil import temporary_file
-
from twitter.pants.base import Config
from twitter.pants.build_root_test import BuildRootTest
from twitter.pants.commands.goal import SpecParser
@@ -28,6 +26,8 @@ from twitter.pants.goal import Context, Mkflag
from twitter.pants.tasks import Task
from twitter.pants.tasks.console_task import ConsoleTask
+import pytest
+
def prepare_task(task_type, config=None, args=None, targets=None, **kwargs):
"""Prepares a Task for execution.
diff --git a/tests/python/twitter/pants/tasks/test_context.py b/tests/python/twitter/pants/tasks/test_context.py
index 678b229..a041f09 100644
--- a/tests/python/twitter/pants/tasks/test_context.py
+++ b/tests/python/twitter/pants/tasks/test_context.py
@@ -33,6 +33,7 @@ class MockTarget(object):
for dep in self.dependencies:
dep.walk(work)
+
class ContextTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment